1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
84 and nodes on the right having higher values. We then output the tree
in order.  */
87 struct case_node GTY(())
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
98 typedef struct case_node case_node;
99 typedef struct case_node *case_node_ptr;
101 /* These are used by estimate_case_costs and balance_case_nodes. */
103 /* This must be a signed type, and non-ANSI compilers lack signed char. */
104 static short cost_table_[129];
105 static int use_cost_table;
106 static int cost_table_initialized;
108 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
110 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
112 /* Stack of control and binding constructs we are currently inside.
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
126 Each type of construct has its own individual stack.
127 For example, loops have `cond_stack'. Each object points to the
128 next object of the same type through the `next' field.
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
137 struct nesting GTY(())
140 struct nesting *next;
150 /* For conds (if-then and if-then-else statements). */
153 /* Label for the end of the if construct.
154 There is none if EXITFLAG was not set
155 and no `else' has been seen yet. */
157 /* Label for the end of this alternative.
158 This may be the end of the if or the next else/elseif. */
160 } GTY ((tag ("COND_NESTING"))) cond;
161 /* For variable binding contours. */
164 /* Sequence number of this binding contour within the function,
165 in order of entry. */
166 int block_start_count;
167 /* The NOTE that starts this contour.
168 Used by expand_goto to check whether the destination
169 is within each contour or not. */
171 /* The saved target_temp_slot_level from our outer block.
172 We may reset target_temp_slot_level to be the level of
173 this block, if that is done, target_temp_slot_level
174 reverts to the saved target_temp_slot_level at the very
176 int block_target_temp_slot_level;
177 /* True if we are currently emitting insns in an area of
178 output code that is controlled by a conditional
179 expression. This is used by the cleanup handling code to
180 generate conditional cleanup actions. */
181 int conditional_code;
182 /* A place to move the start of the exception region for any
183 of the conditional cleanups, must be at the end or after
184 the start of the last unconditional cleanup, and before any
185 conditional branch points. */
186 rtx last_unconditional_cleanup;
187 } GTY ((tag ("BLOCK_NESTING"))) block;
188 /* For switch (C) or case (Pascal) statements. */
191 /* The insn after which the case dispatch should finally
192 be emitted. Zero for a dummy. */
194 /* A list of case labels; it is first built as an AVL tree.
195 During expand_end_case, this is converted to a list, and may be
196 rearranged into a nearly balanced binary tree. */
197 struct case_node *case_list;
198 /* Label to jump to if no case matches. */
200 /* The expression to be dispatched on. */
202 /* Type that INDEX_EXPR should be converted to. */
204 /* Name of this kind of statement, for warnings. */
205 const char *printname;
206 /* Used to save no_line_numbers till we see the first case label.
207 We set this to -1 when we see the first case label in this
209 int line_number_status;
210 } GTY ((tag ("CASE_NESTING"))) case_stmt;
211 } GTY ((desc ("%1.desc"))) data;
214 /* Allocate and return a new `struct nesting'. */
216 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
218 /* Pop the nesting stack element by element until we pop off
219 the element which is at the top of STACK.
220 Update all the other stacks, popping off elements from them
221 as we pop them from nesting_stack. */
223 #define POPSTACK(STACK) \
224 do { struct nesting *target = STACK; \
225 struct nesting *this; \
226 do { this = nesting_stack; \
227 if (cond_stack == this) \
228 cond_stack = cond_stack->next; \
229 if (block_stack == this) \
230 block_stack = block_stack->next; \
231 if (case_stack == this) \
232 case_stack = case_stack->next; \
233 nesting_depth = nesting_stack->depth - 1; \
234 nesting_stack = this->all; } \
235 while (this != target); } while (0)
237 /* In some cases it is impossible to generate code for a forward goto
238 until the label definition is seen. This happens when it may be necessary
239 for the goto to reset the stack pointer: we don't yet know how to do that.
240 So expand_goto puts an entry on this fixup list.
241 Each time a binding contour that resets the stack is exited,
243 If the target label has now been defined, we can insert the proper code. */
245 struct goto_fixup GTY(())
247 /* Points to following fixup. */
248 struct goto_fixup *next;
249 /* Points to the insn before the jump insn.
250 If more code must be inserted, it goes after this insn. */
252 /* The LABEL_DECL that this jump is jumping to, or 0
253 for break, continue or return. */
255 /* The BLOCK for the place where this goto was found. */
257 /* The CODE_LABEL rtx that this is jumping to. */
259 /* Number of binding contours started in current function
260 before the label reference. */
261 int block_start_count;
264 struct stmt_status GTY(())
266 /* Chain of all pending binding contours. */
267 struct nesting * x_block_stack;
269 /* If any new stacks are added here, add them to POPSTACKS too. */
271 /* Chain of all pending conditional statements. */
272 struct nesting * x_cond_stack;
274 /* Chain of all pending case or switch statements. */
275 struct nesting * x_case_stack;
277 /* Separate chain including all of the above,
278 chained through the `all' field. */
279 struct nesting * x_nesting_stack;
281 /* Number of entries on nesting_stack now. */
284 /* Number of binding contours started so far in this function. */
285 int x_block_start_count;
287 /* Location of last line-number note, whether we actually
288 emitted it or not. */
289 location_t x_emit_locus;
291 struct goto_fixup *x_goto_fixup_chain;
294 #define block_stack (cfun->stmt->x_block_stack)
295 #define cond_stack (cfun->stmt->x_cond_stack)
296 #define case_stack (cfun->stmt->x_case_stack)
297 #define nesting_stack (cfun->stmt->x_nesting_stack)
298 #define nesting_depth (cfun->stmt->x_nesting_depth)
299 #define current_block_start_count (cfun->stmt->x_block_start_count)
300 #define emit_locus (cfun->stmt->x_emit_locus)
301 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
303 /* Nonzero if we are using EH to handle cleanups. */
304 int using_eh_for_cleanups_p = 0;
306 static int n_occurrences (int, const char *);
307 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
308 static void expand_nl_goto_receiver (void);
309 static bool check_operand_nalternatives (tree, tree);
310 static bool check_unique_operand_names (tree, tree);
311 static char *resolve_operand_name_1 (char *, tree, tree);
312 static void expand_null_return_1 (void);
313 static enum br_predictor return_prediction (rtx);
314 static rtx shift_return_value (rtx);
315 static void expand_value_return (rtx);
316 static void do_jump_if_equal (rtx, rtx, rtx, int);
317 static int estimate_case_costs (case_node_ptr);
318 static bool same_case_target_p (rtx, rtx);
319 static void strip_default_case_nodes (case_node_ptr *, rtx);
320 static bool lshift_cheap_p (void);
321 static int case_bit_test_cmp (const void *, const void *);
322 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
323 static void group_case_nodes (case_node_ptr);
324 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
325 static int node_has_low_bound (case_node_ptr, tree);
326 static int node_has_high_bound (case_node_ptr, tree);
327 static int node_is_bounded (case_node_ptr, tree);
328 static void emit_jump_if_reachable (rtx);
329 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
330 static struct case_node *case_tree2list (case_node *, case_node *);
333 using_eh_for_cleanups (void)
335 using_eh_for_cleanups_p = 1;
339 init_stmt_for_function (void)
341 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
344 /* Record the current file and line. Called from emit_line_note. */
347 set_file_and_line_for_stmt (location_t location)
349 /* If we're outputting an inline function, and we add a line note,
350 there may be no CFUN->STMT information. So, there's no need to
353 emit_locus = location;
356 /* Emit a no-op instruction. */
363 last_insn = get_last_insn ();
365 && (LABEL_P (last_insn)
366 || (NOTE_P (last_insn)
367 && prev_real_insn (last_insn) == 0)))
368 emit_insn (gen_nop ());
371 /* Return the rtx-label that corresponds to a LABEL_DECL,
372 creating it if necessary. */
375 label_rtx (tree label)
377 if (TREE_CODE (label) != LABEL_DECL)
380 if (!DECL_RTL_SET_P (label))
382 rtx r = gen_label_rtx ();
383 SET_DECL_RTL (label, r);
384 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
385 LABEL_PRESERVE_P (r) = 1;
388 return DECL_RTL (label);
391 /* As above, but also put it on the forced-reference list of the
392 function that contains it. */
394 force_label_rtx (tree label)
396 rtx ref = label_rtx (label);
397 tree function = decl_function_context (label);
403 if (function != current_function_decl)
404 p = find_function_data (function);
408 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
409 p->expr->x_forced_labels);
413 /* Add an unconditional jump to LABEL as the next sequential instruction. */
416 emit_jump (rtx label)
418 do_pending_stack_adjust ();
419 emit_jump_insn (gen_jump (label));
423 /* Emit code to jump to the address
424 specified by the pointer expression EXP. */
427 expand_computed_goto (tree exp)
429 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
431 x = convert_memory_address (Pmode, x);
434 do_pending_stack_adjust ();
435 emit_indirect_jump (x);
438 /* Handle goto statements and the labels that they can go to. */
440 /* Specify the location in the RTL code of a label LABEL,
441 which is a LABEL_DECL tree node.
443 This is used for the kind of label that the user can jump to with a
444 goto statement, and for alternatives of a switch or case statement.
445 RTL labels generated for loops and conditionals don't go through here;
446 they are generated directly at the RTL level, by other functions below.
448 Note that this has nothing to do with defining label *names*.
449 Languages vary in how they do that and what that even means. */
452 expand_label (tree label)
454 rtx label_r = label_rtx (label);
456 do_pending_stack_adjust ();
457 emit_label (label_r);
458 if (DECL_NAME (label))
459 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
461 if (DECL_NONLOCAL (label))
463 expand_nl_goto_receiver ();
464 nonlocal_goto_handler_labels
465 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
466 nonlocal_goto_handler_labels);
469 if (FORCED_LABEL (label))
470 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
472 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
473 maybe_set_first_label_num (label_r);
476 /* Generate RTL code for a `goto' statement with target label LABEL.
477 LABEL should be a LABEL_DECL tree node that was or will later be
478 defined with `expand_label'. */
481 expand_goto (tree label)
483 #ifdef ENABLE_CHECKING
484 /* Check for a nonlocal goto to a containing function. Should have
485 gotten translated to __builtin_nonlocal_goto. */
486 tree context = decl_function_context (label);
487 if (context != 0 && context != current_function_decl)
491 emit_jump (label_rtx (label));
494 /* Return the number of times character C occurs in string S. */
496 n_occurrences (int c, const char *s)
504 /* Generate RTL for an asm statement (explicit assembler code).
505 STRING is a STRING_CST node containing the assembler code text,
506 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
507 insn is volatile; don't optimize it. */
510 expand_asm (tree string, int vol)
514 if (TREE_CODE (string) == ADDR_EXPR)
515 string = TREE_OPERAND (string, 0);
517 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
519 MEM_VOLATILE_P (body) = vol;
524 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
525 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
526 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
527 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
528 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
529 constraint allows the use of a register operand. And, *IS_INOUT
530 will be true if the operand is read-write, i.e., if it is used as
531 an input as well as an output. If *CONSTRAINT_P is not in
532 canonical form, it will be made canonical. (Note that `+' will be
533 replaced with `=' as part of this process.)
535 Returns TRUE if all went well; FALSE if an error occurred. */
538 parse_output_constraint (const char **constraint_p, int operand_num,
539 int ninputs, int noutputs, bool *allows_mem,
540 bool *allows_reg, bool *is_inout)
542 const char *constraint = *constraint_p;
545 /* Assume the constraint doesn't allow the use of either a register
550 /* Allow the `=' or `+' to not be at the beginning of the string,
551 since it wasn't explicitly documented that way, and there is a
552 large body of code that puts it last. Swap the character to
553 the front, so as not to uglify any place else. */
554 p = strchr (constraint, '=');
556 p = strchr (constraint, '+');
558 /* If the string doesn't contain an `=', issue an error
562 error ("output operand constraint lacks `='");
566 /* If the constraint begins with `+', then the operand is both read
567 from and written to. */
568 *is_inout = (*p == '+');
570 /* Canonicalize the output constraint so that it begins with `='. */
571 if (p != constraint || is_inout)
574 size_t c_len = strlen (constraint);
577 warning ("output constraint `%c' for operand %d is not at the beginning",
580 /* Make a copy of the constraint. */
581 buf = alloca (c_len + 1);
582 strcpy (buf, constraint);
583 /* Swap the first character and the `=' or `+'. */
584 buf[p - constraint] = buf[0];
585 /* Make sure the first character is an `='. (Until we do this,
586 it might be a `+'.) */
588 /* Replace the constraint with the canonicalized string. */
589 *constraint_p = ggc_alloc_string (buf, c_len);
590 constraint = *constraint_p;
593 /* Loop through the constraint string. */
594 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
599 error ("operand constraint contains incorrectly positioned '+' or '='");
603 if (operand_num + 1 == ninputs + noutputs)
605 error ("`%%' constraint used with last operand");
610 case 'V': case 'm': case 'o':
614 case '?': case '!': case '*': case '&': case '#':
615 case 'E': case 'F': case 'G': case 'H':
616 case 's': case 'i': case 'n':
617 case 'I': case 'J': case 'K': case 'L': case 'M':
618 case 'N': case 'O': case 'P': case ',':
621 case '0': case '1': case '2': case '3': case '4':
622 case '5': case '6': case '7': case '8': case '9':
624 error ("matching constraint not valid in output operand");
628 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
629 excepting those that expand_call created. So match memory
646 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
648 #ifdef EXTRA_CONSTRAINT_STR
649 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
651 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
655 /* Otherwise we can't assume anything about the nature of
656 the constraint except that it isn't purely registers.
657 Treat it like "g" and hope for the best. */
668 /* Similar, but for input constraints. */
671 parse_input_constraint (const char **constraint_p, int input_num,
672 int ninputs, int noutputs, int ninout,
673 const char * const * constraints,
674 bool *allows_mem, bool *allows_reg)
676 const char *constraint = *constraint_p;
677 const char *orig_constraint = constraint;
678 size_t c_len = strlen (constraint);
680 bool saw_match = false;
682 /* Assume the constraint doesn't allow the use of either
683 a register or memory. */
687 /* Make sure constraint has neither `=', `+', nor '&'. */
689 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
690 switch (constraint[j])
692 case '+': case '=': case '&':
693 if (constraint == orig_constraint)
695 error ("input operand constraint contains `%c'", constraint[j]);
701 if (constraint == orig_constraint
702 && input_num + 1 == ninputs - ninout)
704 error ("`%%' constraint used with last operand");
709 case 'V': case 'm': case 'o':
714 case '?': case '!': case '*': case '#':
715 case 'E': case 'F': case 'G': case 'H':
716 case 's': case 'i': case 'n':
717 case 'I': case 'J': case 'K': case 'L': case 'M':
718 case 'N': case 'O': case 'P': case ',':
721 /* Whether or not a numeric constraint allows a register is
722 decided by the matching constraint, and so there is no need
723 to do anything special with them. We must handle them in
724 the default case, so that we don't unnecessarily force
725 operands to memory. */
726 case '0': case '1': case '2': case '3': case '4':
727 case '5': case '6': case '7': case '8': case '9':
734 match = strtoul (constraint + j, &end, 10);
735 if (match >= (unsigned long) noutputs)
737 error ("matching constraint references invalid operand number");
741 /* Try and find the real constraint for this dup. Only do this
742 if the matching constraint is the only alternative. */
744 && (j == 0 || (j == 1 && constraint[0] == '%')))
746 constraint = constraints[match];
747 *constraint_p = constraint;
748 c_len = strlen (constraint);
750 /* ??? At the end of the loop, we will skip the first part of
751 the matched constraint. This assumes not only that the
752 other constraint is an output constraint, but also that
753 the '=' or '+' come first. */
757 j = end - constraint;
758 /* Anticipate increment at end of loop. */
773 if (! ISALPHA (constraint[j]))
775 error ("invalid punctuation `%c' in constraint", constraint[j]);
778 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
781 #ifdef EXTRA_CONSTRAINT_STR
782 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
784 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
788 /* Otherwise we can't assume anything about the nature of
789 the constraint except that it isn't purely registers.
790 Treat it like "g" and hope for the best. */
798 if (saw_match && !*allows_reg)
799 warning ("matching constraint does not allow a register");
804 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
805 if it is an operand which must be passed in memory (i.e. an "m"
806 constraint), false otherwise. */
809 asm_op_is_mem_input (tree input, tree expr)
811 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
812 tree outputs = ASM_OUTPUTS (expr);
813 int noutputs = list_length (outputs);
814 const char **constraints
815 = (const char **) alloca ((noutputs) * sizeof (const char *));
817 bool allows_mem, allows_reg;
820 /* Collect output constraints. */
821 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
822 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
824 /* We pass 0 for input_num, ninputs and ninout; they are only used for
825 error checking which will be done at expand time. */
826 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
827 &allows_mem, &allows_reg);
828 return (!allows_reg && allows_mem);
831 /* Check for overlap between registers marked in CLOBBERED_REGS and
832 anything inappropriate in DECL. Emit error and return TRUE for error,
836 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
838 /* Conflicts between asm-declared register variables and the clobber
839 list are not allowed. */
840 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
841 && DECL_REGISTER (decl)
842 && REG_P (DECL_RTL (decl))
843 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
845 rtx reg = DECL_RTL (decl);
848 for (regno = REGNO (reg);
850 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
852 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
854 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
855 IDENTIFIER_POINTER (DECL_NAME (decl)));
857 /* Reset registerness to stop multiple errors emitted for a
859 DECL_REGISTER (decl) = 0;
866 /* Generate RTL for an asm statement with arguments.
867 STRING is the instruction template.
868 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
869 Each output or input has an expression in the TREE_VALUE
870 and a tree list in TREE_PURPOSE which in turn contains a constraint
871 name in TREE_VALUE (or NULL_TREE) and a constraint string
873 CLOBBERS is a list of STRING_CST nodes each naming a hard register
874 that is clobbered by this insn.
876 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
877 Some elements of OUTPUTS may be replaced with trees representing temporary
878 values. The caller should copy those temporary values to the originally
881 VOL nonzero means the insn is volatile; don't optimize it. */
884 expand_asm_operands (tree string, tree outputs, tree inputs,
885 tree clobbers, int vol, location_t locus)
887 rtvec argvec, constraintvec;
889 int ninputs = list_length (inputs);
890 int noutputs = list_length (outputs);
893 HARD_REG_SET clobbered_regs;
894 int clobber_conflict_found = 0;
898 /* Vector of RTX's of evaluated output operands. */
899 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
900 int *inout_opnum = alloca (noutputs * sizeof (int));
901 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
902 enum machine_mode *inout_mode
903 = alloca (noutputs * sizeof (enum machine_mode));
904 const char **constraints
905 = alloca ((noutputs + ninputs) * sizeof (const char *));
906 int old_generating_concat_p = generating_concat_p;
908 /* An ASM with no outputs needs to be treated as volatile, for now. */
912 if (! check_operand_nalternatives (outputs, inputs))
915 string = resolve_asm_operand_names (string, outputs, inputs);
917 /* Collect constraints. */
919 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
920 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
921 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
922 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
924 /* Sometimes we wish to automatically clobber registers across an asm.
925 Case in point is when the i386 backend moved from cc0 to a hard reg --
926 maintaining source-level compatibility means automatically clobbering
927 the flags register. */
928 clobbers = targetm.md_asm_clobbers (clobbers);
930 /* Count the number of meaningful clobbered registers, ignoring what
931 we would ignore later. */
933 CLEAR_HARD_REG_SET (clobbered_regs);
934 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
936 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
938 i = decode_reg_name (regname);
939 if (i >= 0 || i == -4)
942 error ("unknown register name `%s' in `asm'", regname);
944 /* Mark clobbered registers. */
947 /* Clobbering the PIC register is an error */
948 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
950 error ("PIC register `%s' clobbered in `asm'", regname);
954 SET_HARD_REG_BIT (clobbered_regs, i);
958 /* First pass over inputs and outputs checks validity and sets
959 mark_addressable if needed. */
962 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
964 tree val = TREE_VALUE (tail);
965 tree type = TREE_TYPE (val);
966 const char *constraint;
971 /* If there's an erroneous arg, emit no insn. */
972 if (type == error_mark_node)
975 /* Try to parse the output constraint. If that fails, there's
976 no point in going further. */
977 constraint = constraints[i];
978 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
979 &allows_mem, &allows_reg, &is_inout))
986 && REG_P (DECL_RTL (val))
987 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
988 lang_hooks.mark_addressable (val);
995 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
997 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1001 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1003 bool allows_reg, allows_mem;
1004 const char *constraint;
1006 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1007 would get VOIDmode and that could cause a crash in reload. */
1008 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1011 constraint = constraints[i + noutputs];
1012 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1013 constraints, &allows_mem, &allows_reg))
1016 if (! allows_reg && allows_mem)
1017 lang_hooks.mark_addressable (TREE_VALUE (tail));
1020 /* Second pass evaluates arguments. */
1023 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1025 tree val = TREE_VALUE (tail);
1026 tree type = TREE_TYPE (val);
1032 if (!parse_output_constraint (&constraints[i], i, ninputs,
1033 noutputs, &allows_mem, &allows_reg,
1037 /* If an output operand is not a decl or indirect ref and our constraint
1038 allows a register, make a temporary to act as an intermediate.
1039 Make the asm insn write into that, then our caller will copy it to
1040 the real output operand. Likewise for promoted variables. */
1042 generating_concat_p = 0;
1044 real_output_rtx[i] = NULL_RTX;
1045 if ((TREE_CODE (val) == INDIRECT_REF
1048 && (allows_mem || REG_P (DECL_RTL (val)))
1049 && ! (REG_P (DECL_RTL (val))
1050 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1054 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1056 op = validize_mem (op);
1058 if (! allows_reg && !MEM_P (op))
1059 error ("output number %d not directly addressable", i);
1060 if ((! allows_mem && MEM_P (op))
1061 || GET_CODE (op) == CONCAT)
1063 real_output_rtx[i] = protect_from_queue (op, 1);
1064 op = gen_reg_rtx (GET_MODE (op));
1066 emit_move_insn (op, real_output_rtx[i]);
1071 op = assign_temp (type, 0, 0, 1);
1072 op = validize_mem (op);
1073 TREE_VALUE (tail) = make_tree (type, op);
1077 generating_concat_p = old_generating_concat_p;
1081 inout_mode[ninout] = TYPE_MODE (type);
1082 inout_opnum[ninout++] = i;
1085 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1086 clobber_conflict_found = 1;
1089 /* Make vectors for the expression-rtx, constraint strings,
1090 and named operands. */
1092 argvec = rtvec_alloc (ninputs);
1093 constraintvec = rtvec_alloc (ninputs);
1095 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1096 : GET_MODE (output_rtx[0])),
1097 TREE_STRING_POINTER (string),
1098 empty_string, 0, argvec, constraintvec,
1101 MEM_VOLATILE_P (body) = vol;
1103 /* Eval the inputs and put them into ARGVEC.
1104 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1106 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1108 bool allows_reg, allows_mem;
1109 const char *constraint;
1113 constraint = constraints[i + noutputs];
1114 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1115 constraints, &allows_mem, &allows_reg))
1118 generating_concat_p = 0;
1120 val = TREE_VALUE (tail);
1121 type = TREE_TYPE (val);
1122 op = expand_expr (val, NULL_RTX, VOIDmode,
1123 (allows_mem && !allows_reg
1124 ? EXPAND_MEMORY : EXPAND_NORMAL));
1126 /* Never pass a CONCAT to an ASM. */
1127 if (GET_CODE (op) == CONCAT)
1128 op = force_reg (GET_MODE (op), op);
1129 else if (MEM_P (op))
1130 op = validize_mem (op);
1132 if (asm_operand_ok (op, constraint) <= 0)
1135 op = force_reg (TYPE_MODE (type), op);
1136 else if (!allows_mem)
1137 warning ("asm operand %d probably doesn't match constraints",
1139 else if (MEM_P (op))
1141 /* We won't recognize either volatile memory or memory
1142 with a queued address as available a memory_operand
1143 at this point. Ignore it: clearly this *is* a memory. */
1147 warning ("use of memory input without lvalue in "
1148 "asm operand %d is deprecated", i + noutputs);
1150 if (CONSTANT_P (op))
1152 rtx mem = force_const_mem (TYPE_MODE (type), op);
1154 op = validize_mem (mem);
1156 op = force_reg (TYPE_MODE (type), op);
1159 || GET_CODE (op) == SUBREG
1160 || GET_CODE (op) == CONCAT)
1162 tree qual_type = build_qualified_type (type,
1164 | TYPE_QUAL_CONST));
1165 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1166 memloc = validize_mem (memloc);
1167 emit_move_insn (memloc, op);
1173 generating_concat_p = old_generating_concat_p;
1174 ASM_OPERANDS_INPUT (body, i) = op;
1176 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1177 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1179 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1180 clobber_conflict_found = 1;
1183 /* Protect all the operands from the queue now that they have all been
1186 generating_concat_p = 0;
1188 for (i = 0; i < ninputs - ninout; i++)
1189 ASM_OPERANDS_INPUT (body, i)
1190 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1192 for (i = 0; i < noutputs; i++)
1193 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1195 /* For in-out operands, copy output rtx to input rtx. */
1196 for (i = 0; i < ninout; i++)
1198 int j = inout_opnum[i];
1201 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1204 sprintf (buffer, "%d", j);
1205 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1206 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1209 generating_concat_p = old_generating_concat_p;
1211 /* Now, for each output, construct an rtx
1212 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1213 ARGVEC CONSTRAINTS OPNAMES))
1214 If there is more than one, put them inside a PARALLEL. */
1216 if (noutputs == 1 && nclobbers == 0)
1218 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1219 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1222 else if (noutputs == 0 && nclobbers == 0)
1224 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1236 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1238 /* For each output operand, store a SET. */
1239 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1241 XVECEXP (body, 0, i)
1242 = gen_rtx_SET (VOIDmode,
1244 gen_rtx_ASM_OPERANDS
1245 (GET_MODE (output_rtx[i]),
1246 TREE_STRING_POINTER (string),
1247 constraints[i], i, argvec, constraintvec,
1250 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1253 /* If there are no outputs (but there are some clobbers)
1254 store the bare ASM_OPERANDS into the PARALLEL. */
1257 XVECEXP (body, 0, i++) = obody;
1259 /* Store (clobber REG) for each clobbered register specified. */
1261 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1263 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1264 int j = decode_reg_name (regname);
1269 if (j == -3) /* `cc', which is not a register */
1272 if (j == -4) /* `memory', don't cache memory across asm */
1274 XVECEXP (body, 0, i++)
1275 = gen_rtx_CLOBBER (VOIDmode,
1278 gen_rtx_SCRATCH (VOIDmode)));
1282 /* Ignore unknown register, error already signaled. */
1286 /* Use QImode since that's guaranteed to clobber just one reg. */
1287 clobbered_reg = gen_rtx_REG (QImode, j);
1289 /* Do sanity check for overlap between clobbers and respectively
1290 input and outputs that hasn't been handled. Such overlap
1291 should have been detected and reported above. */
1292 if (!clobber_conflict_found)
1296 /* We test the old body (obody) contents to avoid tripping
1297 over the under-construction body. */
1298 for (opno = 0; opno < noutputs; opno++)
1299 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1300 internal_error ("asm clobber conflict with output operand");
1302 for (opno = 0; opno < ninputs - ninout; opno++)
1303 if (reg_overlap_mentioned_p (clobbered_reg,
1304 ASM_OPERANDS_INPUT (obody, opno)))
1305 internal_error ("asm clobber conflict with input operand");
1308 XVECEXP (body, 0, i++)
1309 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1315 /* For any outputs that needed reloading into registers, spill them
1316 back to where they belong. */
1317 for (i = 0; i < noutputs; ++i)
1318 if (real_output_rtx[i])
1319 emit_move_insn (real_output_rtx[i], output_rtx[i]);
/* Expand an ASM_EXPR tree node EXP into RTL.  A simple asm
   (ASM_INPUT_P) just emits the instruction template; an extended asm
   records where each output lvalue lives, expands the operands via
   expand_asm_operands, and copies any intermediate temporaries back
   into the user-specified outputs.
   NOTE(review): interior lines of this function appear elided in this
   copy of the file -- confirm details against the upstream source.  */
1325 expand_asm_expr (tree exp)
1331 if (ASM_INPUT_P (exp))
/* Simple asm statement: only the template string matters.  */
1333 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1337 outputs = ASM_OUTPUTS (exp);
1338 noutputs = list_length (outputs);
1339 /* o[I] is the place that output number I should be written.  */
1340 o = (tree *) alloca (noutputs * sizeof (tree));
1342 /* Record the contents of OUTPUTS before it is modified.  */
1343 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1344 o[i] = TREE_VALUE (tail);
1346 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1347 OUTPUTS some trees for where the values were actually stored.  */
1348 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1349 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1352 /* Copy all the intermediate outputs into the specified outputs.  */
1353 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
/* Only copy when expand_asm_operands substituted a temporary.  */
1355 if (o[i] != TREE_VALUE (tail))
1357 expand_assignment (o[i], TREE_VALUE (tail), 0);
1360 /* Restore the original value so that it's correct the next
1361 time we expand this function.  */
1362 TREE_VALUE (tail) = o[i];
1366 /* Those MODIFY_EXPRs could do autoincrements.  */
1370 /* A subroutine of expand_asm_operands. Check that all operands have
1371 the same number of alternatives. Return true if so. */
1374 check_operand_nalternatives (tree outputs, tree inputs)
/* The alternative count is the number of commas in the first
   constraint string plus one; every other operand must match it.  */
1376 if (outputs || inputs)
1378 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1380 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1383 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1385 error ("too many alternatives in `asm'");
1392 const char *constraint
1393 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
/* Mismatched comma count means a different number of alternatives.  */
1395 if (n_occurrences (',', constraint) != nalternatives)
1397 error ("operand constraints for `asm' differ in number of alternatives");
/* Walk the outputs first, then continue into the inputs list.  */
1401 if (TREE_CHAIN (tmp))
1402 tmp = TREE_CHAIN (tmp);
1404 tmp = next, next = 0;
1411 /* A subroutine of expand_asm_operands. Check that all operand names
1412 are unique. Return true if so. We rely on the fact that these names
1413 are identifiers, and so have been canonicalized by get_identifier,
1414 so all we need are pointer comparisons. */
1417 check_unique_operand_names (tree outputs, tree inputs)
/* Pairwise comparison of every named operand against all later ones:
   outputs against outputs, then inputs against inputs and outputs.
   NOTE(review): the header mentions pointer comparisons, but the code
   uses simple_cst_equal -- confirm which is authoritative upstream.  */
1421 for (i = outputs; i ; i = TREE_CHAIN (i))
1423 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1427 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1428 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1432 for (i = inputs; i ; i = TREE_CHAIN (i))
1434 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1438 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1439 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* An input name must also not collide with any output name.  */
1441 for (j = outputs; j ; j = TREE_CHAIN (j))
1442 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1449 error ("duplicate asm operand name '%s'",
1450 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1454 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1455 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1456 STRING and in the constraints to those numbers. */
1459 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1466 check_unique_operand_names (outputs, inputs);
1468 /* Substitute [<name>] in input constraint strings. There should be no
1469 named operands in output constraints. */
1470 for (t = inputs; t ; t = TREE_CHAIN (t))
1472 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
/* Only rewrite constraints that actually contain a '[' reference.  */
1473 if (strchr (c, '[') != NULL)
1475 p = buffer = xstrdup (c);
1476 while ((p = strchr (p, '[')) != NULL)
1477 p = resolve_operand_name_1 (p, outputs, inputs);
/* Replace the constraint with the substituted copy.  */
1478 TREE_VALUE (TREE_PURPOSE (t))
1479 = build_string (strlen (buffer), buffer);
1484 /* Now check for any needed substitutions in the template. */
1485 c = TREE_STRING_POINTER (string);
1486 while ((c = strchr (c, '%')) != NULL)
/* %x[name] forms: letter modifier followed by a bracketed name.  */
1490 else if (ISALPHA (c[1]) && c[2] == '[')
1501 /* OK, we need to make a copy so we can perform the substitutions.
1502 Assume that we will not need extra space--we get to remove '['
1503 and ']', which means we cannot have a problem until we have more
1504 than 999 operands. */
1505 buffer = xstrdup (TREE_STRING_POINTER (string));
1506 p = buffer + (c - TREE_STRING_POINTER (string));
1508 while ((p = strchr (p, '%')) != NULL)
1512 else if (ISALPHA (p[1]) && p[2] == '[')
1520 p = resolve_operand_name_1 (p, outputs, inputs);
1523 string = build_string (strlen (buffer), buffer);
1530 /* A subroutine of resolve_operand_names. P points to the '[' for a
1531 potential named operand of the form [<name>]. In place, replace
1532 the name and brackets with a number. Return a pointer to the
1533 balance of the string after substitution. */
1536 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1543 /* Collect the operand name. */
1544 q = strchr (p, ']');
1547 error ("missing close brace for named operand");
/* No closing ']': skip to end of string so the caller's loop stops.  */
1548 return strchr (p, '\0');
1552 /* Resolve the name to a number. */
/* Operand numbering: outputs first (0..noutputs-1), then inputs.  */
1553 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1555 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1558 const char *c = TREE_STRING_POINTER (name);
1559 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1563 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1565 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1568 const char *c = TREE_STRING_POINTER (name);
1569 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1575 error ("undefined named operand '%s'", p + 1);
1579 /* Replace the name with the number. Unfortunately, not all libraries
1580 get the return value of sprintf correct, so search for the end of the
1581 generated string by hand. */
1582 sprintf (p, "%d", op);
1583 p = strchr (p, '\0');
1585 /* Verify the no extra buffer space assumption. */
1589 /* Shift the rest of the buffer down to fill the gap. */
1590 memmove (p, q + 1, strlen (q + 1) + 1);
1595 /* Generate RTL to evaluate the expression EXP. */
1598 expand_expr_stmt (tree exp)
1603 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1604 type = TREE_TYPE (exp);
1606 /* If all we do is reference a volatile value in memory,
1607 copy it to a register to be sure it is actually touched. */
1608 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
/* VOIDmode volatile reference: nothing to load.  */
1610 if (TYPE_MODE (type) == VOIDmode)
1612 else if (TYPE_MODE (type) != BLKmode)
1613 value = copy_to_reg (value);
/* BLKmode case: force a reference by comparing the value to itself.  */
1616 rtx lab = gen_label_rtx ();
1618 /* Compare the value with itself to reference it. */
1619 emit_cmp_and_jump_insns (value, value, EQ,
1620 expand_expr (TYPE_SIZE (type),
1621 NULL_RTX, VOIDmode, 0),
1627 /* Free any temporaries used to evaluate this expression. */
1633 /* Warn if EXP contains any computations whose results are not used.
1634 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1635 (potential) location of the expression. */
1638 warn_if_unused_value (tree exp, location_t locus)
/* Already-used expressions can't have an unused value.  */
1641 if (TREE_USED (exp))
1644 /* Don't warn about void constructs. This includes casting to void,
1645 void function calls, and statement expressions with a final cast
1647 if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location over the caller's.  */
1650 if (EXPR_LOCUS (exp))
1651 locus = *EXPR_LOCUS (exp);
/* Dispatch on the tree code; several cases recurse by tail-updating
   EXP and falling back into the switch.
   NOTE(review): some case labels and returns appear elided in this
   copy -- verify the full switch against the upstream source.  */
1653 switch (TREE_CODE (exp))
/* Increments/decrements have side effects; never warn.  */
1655 case PREINCREMENT_EXPR:
1656 case POSTINCREMENT_EXPR:
1657 case PREDECREMENT_EXPR:
1658 case POSTDECREMENT_EXPR:
1663 case TRY_CATCH_EXPR:
1664 case WITH_CLEANUP_EXPR:
1669 /* For a binding, warn if no side effect within it. */
1670 exp = BIND_EXPR_BODY (exp);
1674 exp = TREE_OPERAND (exp, 0);
1677 case TRUTH_ORIF_EXPR:
1678 case TRUTH_ANDIF_EXPR:
1679 /* In && or ||, warn if 2nd operand has no side effect. */
1680 exp = TREE_OPERAND (exp, 1);
/* COMPOUND_EXPR: warn on either operand, but let `(foo (), 0)'
   style idioms pass without complaint.  */
1684 if (TREE_NO_WARNING (exp))
1686 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1688 /* Let people do `(foo (), 0)' without a warning. */
1689 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1691 exp = TREE_OPERAND (exp, 1);
1696 case NON_LVALUE_EXPR:
1697 /* Don't warn about conversions not explicit in the user's program. */
1698 if (TREE_NO_WARNING (exp))
1700 /* Assignment to a cast usually results in a cast of a modify.
1701 Don't complain about that. There can be an arbitrary number of
1702 casts before the modify, so we must loop until we find the first
1703 non-cast expression and then test to see if that is a modify. */
1705 tree tem = TREE_OPERAND (exp, 0);
1707 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1708 tem = TREE_OPERAND (tem, 0);
1710 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1711 || TREE_CODE (tem) == CALL_EXPR)
1717 /* Don't warn about automatic dereferencing of references, since
1718 the user cannot control it. */
1719 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1721 exp = TREE_OPERAND (exp, 0);
1727 /* Referencing a volatile value is a side effect, so don't warn. */
1729 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1730 && TREE_THIS_VOLATILE (exp))
1733 /* If this is an expression which has no operands, there is no value
1734 to be unused. There are no such language-independent codes,
1735 but front ends may define such. */
1736 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1737 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1741 /* If this is an expression with side effects, don't warn. */
1742 if (TREE_SIDE_EFFECTS (exp))
1745 warning ("%Hvalue computed is not used", &locus);
1750 /* Generate RTL for the start of an if-then. COND is the expression
1751 whose truth should be tested.
1753 If EXITFLAG is nonzero, this conditional is visible to
1754 `exit_something'. */
1757 expand_start_cond (tree cond, int exitflag)
1759 struct nesting *thiscond = ALLOC_NESTING ();
1761 /* Make an entry on cond_stack for the cond we are entering. */
1763 thiscond->desc = COND_NESTING;
1764 thiscond->next = cond_stack;
1765 thiscond->all = nesting_stack;
1766 thiscond->depth = ++nesting_depth;
/* next_label is where control goes when COND is false.  */
1767 thiscond->data.cond.next_label = gen_label_rtx ();
1768 /* Before we encounter an `else', we don't need a separate exit label
1769 unless there are supposed to be exit statements
1770 to exit this conditional. */
1771 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1772 thiscond->data.cond.endif_label = thiscond->exit_label;
/* Push this conditional on both the cond and the overall nesting stacks.  */
1773 cond_stack = thiscond;
1774 nesting_stack = thiscond;
/* Emit the test: jump to next_label when COND is false.  */
1776 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1779 /* Generate RTL between then-clause and the elseif-clause
1780 of an if-then-elseif-.... */
1783 expand_start_elseif (tree cond)
/* Lazily create the end-of-if label the first time it is needed.  */
1785 if (cond_stack->data.cond.endif_label == 0)
1786 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Finish the then-clause: jump past the rest of the conditional,
   then place the false-branch label for the previous test.  */
1787 emit_jump (cond_stack->data.cond.endif_label);
1788 emit_label (cond_stack->data.cond.next_label);
/* Start a fresh false-branch label and emit the new test.  */
1789 cond_stack->data.cond.next_label = gen_label_rtx ();
1790 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1793 /* Generate RTL between the then-clause and the else-clause
1794 of an if-then-else. */
1797 expand_start_else (void)
/* Lazily create the end-of-if label the first time it is needed.  */
1799 if (cond_stack->data.cond.endif_label == 0)
1800 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Finish the then-clause and place the label for the false branch.  */
1802 emit_jump (cond_stack->data.cond.endif_label);
1803 emit_label (cond_stack->data.cond.next_label);
1804 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1807 /* After calling expand_start_else, turn this "else" into an "else if"
1808 by providing another condition. */
1811 expand_elseif (tree cond)
/* Re-arm the false-branch label (cleared by expand_start_else) and
   emit the new test.  */
1813 cond_stack->data.cond.next_label = gen_label_rtx ();
1814 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1817 /* Generate RTL for the end of an if-then.
1818 Pop the record for it off of cond_stack. */
1821 expand_end_cond (void)
1823 struct nesting *thiscond = cond_stack;
1825 do_pending_stack_adjust ();
/* Emit any labels still outstanding: the false-branch label (if no
   else/elseif consumed it) and the end-of-if label.  */
1826 if (thiscond->data.cond.next_label)
1827 emit_label (thiscond->data.cond.next_label);
1828 if (thiscond->data.cond.endif_label)
1829 emit_label (thiscond->data.cond.endif_label);
1831 POPSTACK (cond_stack);
1834 /* Return nonzero if we should preserve sub-expressions as separate
1835 pseudos. We never do so if we aren't optimizing. We always do so
1836 if -fexpensive-optimizations. */
1839 preserve_subexpressions_p (void)
1841 if (flag_expensive_optimizations)
/* Not optimizing, or no function/statement state available.  */
1844 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1851 /* Generate RTL to return from the current function, with no value.
1852 (That is, we do not do anything about returning any value.) */
1855 expand_null_return (void)
1857 /* If this function was declared to return a value, but we
1858 didn't, clobber the return registers so that they are not
1859 propagated live to the rest of the function. */
1860 clobber_return_register ();
/* Shared tail: adjust the stack and jump to the return label.  */
1862 expand_null_return_1 ();
1865 /* Generate RTL to return directly from the current function.
1866 (That is, we bypass any return value.) */
1869 expand_naked_return (void)
1873 clear_pending_stack_adjust ();
1874 do_pending_stack_adjust ();
/* Reuse the naked-return label, creating it on first use.  */
1876 end_label = naked_return_label;
1878 end_label = naked_return_label = gen_label_rtx ();
1880 emit_jump (end_label);
1883 /* Try to guess whether the value of return means error code. */
1884 static enum br_predictor
1885 return_prediction (rtx val)
1887 /* Different heuristics for pointers and scalars. */
1888 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1890 /* NULL is usually not returned. */
1891 if (val == const0_rtx)
1892 return PRED_NULL_RETURN;
1896 /* Negative return values are often used to indicate
1898 if (GET_CODE (val) == CONST_INT
1899 && INTVAL (val) < 0)
1900 return PRED_NEGATIVE_RETURN;
1901 /* Constant return values are also usually errors,
1902 zero/one often mean booleans so exclude them from the
1904 if (CONSTANT_P (val)
1905 && (val != const0_rtx && val != const1_rtx))
1906 return PRED_CONST_RETURN;
/* Nothing matched: make no prediction.  */
1908 return PRED_NO_PREDICTION;
1912 /* If the current function returns values in the most significant part
1913 of a register, shift return value VAL appropriately. The mode of
1914 the function's return type is known not to be BLKmode. */
1917 shift_return_value (rtx val)
1921 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1922 if (targetm.calls.return_in_msb (type))
1925 HOST_WIDE_INT shift;
1927 target = DECL_RTL (DECL_RESULT (current_function_decl));
/* Shift amount = register width minus the value's width in bits.  */
1928 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1929 - BITS_PER_UNIT * int_size_in_bytes (type));
1931 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1932 gen_lowpart (GET_MODE (target), val),
1933 build_int_2 (shift, 0), target, 1);
1939 /* Generate RTL to return from the current function, with value VAL. */
1942 expand_value_return (rtx val)
1945 enum br_predictor pred;
1947 if (flag_guess_branch_prob
1948 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
1950 /* Emit information for branch prediction. */
1953 note = emit_note (NOTE_INSN_PREDICTION);
1955 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1959 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1961 /* Copy the value to the return location
1962 unless it's already there. */
1964 if (return_reg != val)
1966 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* If the target promotes function return values, convert VAL to the
   promoted mode before storing it into the return location.  */
1967 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1969 int unsignedp = TYPE_UNSIGNED (type);
1970 enum machine_mode old_mode
1971 = DECL_MODE (DECL_RESULT (current_function_decl));
1972 enum machine_mode mode
1973 = promote_mode (type, old_mode, &unsignedp, 1);
1975 if (mode != old_mode)
1976 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value is split across
   several registers; use a group load in that case.  */
1978 if (GET_CODE (return_reg) == PARALLEL)
1979 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1981 emit_move_insn (return_reg, val);
/* Shared tail: adjust the stack and jump to the return label.  */
1984 expand_null_return_1 ();
1987 /* Output a return with no value. */
1990 expand_null_return_1 (void)
1994 clear_pending_stack_adjust ();
1995 do_pending_stack_adjust ();
/* Reuse the function's return label, creating it on first use.  */
1997 end_label = return_label;
1999 end_label = return_label = gen_label_rtx ();
2000 emit_jump (end_label);
2003 /* Generate RTL to evaluate the expression RETVAL and return it
2004 from the current function. */
2007 expand_return (tree retval)
2013 /* If function wants no value, give it none. */
2014 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects, then return nothing.  */
2016 expand_expr (retval, NULL_RTX, VOIDmode, 0);
2018 expand_null_return ();
2022 if (retval == error_mark_node)
2024 /* Treat this like a return of no value from a function that
2026 expand_null_return ();
/* Pick out the expression whose value is actually being returned:
   either RETVAL itself or the RHS of `RESULT_DECL = rhs'.  */
2029 else if (TREE_CODE (retval) == RESULT_DECL)
2030 retval_rhs = retval;
2031 else if ((TREE_CODE (retval) == MODIFY_EXPR
2032 || TREE_CODE (retval) == INIT_EXPR)
2033 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2034 retval_rhs = TREE_OPERAND (retval, 1);
2036 retval_rhs = retval;
2038 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
2040 /* If the result is an aggregate that is being returned in one (or more)
2041 registers, load the registers here. The compiler currently can't handle
2042 copying a BLKmode value into registers. We could put this code in a
2043 more general area (for use by everyone instead of just function
2044 call/return), but until this feature is generally usable it is kept here
2045 (and in expand_call). */
2048 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2049 && REG_P (result_rtl))
2052 unsigned HOST_WIDE_INT bitpos, xbitpos;
2053 unsigned HOST_WIDE_INT padding_correction = 0;
2054 unsigned HOST_WIDE_INT bytes
2055 = int_size_in_bytes (TREE_TYPE (retval_rhs));
2056 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2057 unsigned int bitsize
2058 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
2059 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
2060 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2061 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2062 enum machine_mode tmpmode, result_reg_mode;
2066 expand_null_return ();
2070 /* If the structure doesn't take up a whole number of words, see
2071 whether the register value should be padded on the left or on
2072 the right. Set PADDING_CORRECTION to the number of padding
2073 bits needed on the left side.
2075 In most ABIs, the structure will be returned at the least end of
2076 the register, which translates to right padding on little-endian
2077 targets and left padding on big-endian targets. The opposite
2078 holds if the structure is returned at the most significant
2079 end of the register. */
2080 if (bytes % UNITS_PER_WORD != 0
2081 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2083 : BYTES_BIG_ENDIAN))
2084 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2087 /* Copy the structure BITSIZE bits at a time. */
2088 for (bitpos = 0, xbitpos = padding_correction;
2089 bitpos < bytes * BITS_PER_UNIT;
2090 bitpos += bitsize, xbitpos += bitsize)
2092 /* We need a new destination pseudo each time xbitpos is
2093 on a word boundary and when xbitpos == padding_correction
2094 (the first time through). */
2095 if (xbitpos % BITS_PER_WORD == 0
2096 || xbitpos == padding_correction)
2098 /* Generate an appropriate register. */
2099 dst = gen_reg_rtx (word_mode);
2100 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2102 /* Clear the destination before we move anything into it. */
2103 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2106 /* We need a new source operand each time bitpos is on a word
2108 if (bitpos % BITS_PER_WORD == 0)
2109 src = operand_subword_force (result_val,
2110 bitpos / BITS_PER_WORD,
2113 /* Use bitpos for the source extraction (left justified) and
2114 xbitpos for the destination store (right justified). */
2115 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2116 extract_bit_field (src, bitsize,
2117 bitpos % BITS_PER_WORD, 1,
2118 NULL_RTX, word_mode, word_mode));
2121 tmpmode = GET_MODE (result_rtl);
2122 if (tmpmode == BLKmode)
2124 /* Find the smallest integer mode large enough to hold the
2125 entire structure and use that mode instead of BLKmode
2126 on the USE insn for the return register. */
2127 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2128 tmpmode != VOIDmode;
2129 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2130 /* Have we found a large enough mode? */
2131 if (GET_MODE_SIZE (tmpmode) >= bytes)
2134 /* No suitable mode found. */
2135 if (tmpmode == VOIDmode)
2138 PUT_MODE (result_rtl, tmpmode);
/* Assemble the per-word pseudos into one result register, working
   in word_mode if the final mode is narrower than a word.  */
2141 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2142 result_reg_mode = word_mode;
2144 result_reg_mode = tmpmode;
2145 result_reg = gen_reg_rtx (result_reg_mode);
2148 for (i = 0; i < n_regs; i++)
2149 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2152 if (tmpmode != result_reg_mode)
2153 result_reg = gen_lowpart (tmpmode, result_reg);
2155 expand_value_return (result_reg);
2157 else if (retval_rhs != 0
2158 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2159 && (REG_P (result_rtl)
2160 || (GET_CODE (result_rtl) == PARALLEL)))
2162 /* Calculate the return value into a temporary (usually a pseudo
2164 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2165 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2167 val = assign_temp (nt, 0, 0, 1);
2168 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2169 val = force_not_mem (val);
2171 /* Return the calculated value. */
2172 expand_value_return (shift_return_value (val));
2176 /* No hard reg used; calculate value into hard return reg. */
2177 expand_expr (retval, const0_rtx, VOIDmode, 0);
2179 expand_value_return (result_rtl);
2183 /* Generate the RTL code for entering a binding contour.
2184 The variables are declared one by one, by calls to `expand_decl'.
2186 FLAGS is a bitwise or of the following flags:
2188 1 - Nonzero if this construct should be visible to
2191 2 - Nonzero if this contour does not require a
2192 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2193 language-independent code should set this flag because they
2194 will not create corresponding BLOCK nodes. (There should be
2195 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2196 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2197 when expand_end_bindings is called.
2199 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2200 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2204 expand_start_bindings_and_block (int flags, tree block)
2206 struct nesting *thisblock = ALLOC_NESTING ();
2208 int exit_flag = ((flags & 1) != 0);
2209 int block_flag = ((flags & 2) == 0);
2211 /* If a BLOCK is supplied, then the caller should be requesting a
2212 NOTE_INSN_BLOCK_BEG note. */
2213 if (!block_flag && block)
2216 /* Create a note to mark the beginning of the block. */
2217 note = emit_note (NOTE_INSN_DELETED);
2219 /* Make an entry on block_stack for the block we are entering. */
2221 thisblock->desc = BLOCK_NESTING;
2222 thisblock->next = block_stack;
2223 thisblock->all = nesting_stack;
2224 thisblock->depth = ++nesting_depth;
2225 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2227 thisblock->data.block.conditional_code = 0;
2228 thisblock->data.block.last_unconditional_cleanup = note;
2229 /* When we insert instructions after the last unconditional cleanup,
2230 we don't adjust last_insn. That means that a later add_insn will
2231 clobber the instructions we've just added. The easiest way to
2232 fix this is to just insert another instruction here, so that the
2233 instructions inserted after the last unconditional cleanup are
2234 never the last instruction. */
2235 emit_note (NOTE_INSN_DELETED);
2237 thisblock->data.block.first_insn = note;
2238 thisblock->data.block.block_start_count = ++current_block_start_count;
2239 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
/* Push this contour on both the block and overall nesting stacks.  */
2240 block_stack = thisblock;
2241 nesting_stack = thisblock;
2243 /* Make a new level for allocating stack slots. */
2247 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2248 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2249 expand_expr are made. After we end the region, we know that all
2250 space for all temporaries that were created by TARGET_EXPRs will be
2251 destroyed and their space freed for reuse. */
2254 expand_start_target_temps (void)
2256 /* This is so that even if the result is preserved, the space
2257 allocated will be freed, as we know that it is no longer in use. */
2260 /* Start a new binding layer that will keep track of all cleanup
2261 actions to be performed. */
2262 expand_start_bindings (2);
/* Temporaries created from here on belong to this region.  */
2264 target_temp_slot_level = temp_slot_level;
/* End the temporary-scope region opened by expand_start_target_temps,
   closing its binding contour and releasing the temporaries' space.  */
2268 expand_end_target_temps (void)
2270 expand_end_bindings (NULL_TREE, 0, 0);
2272 /* This is so that even if the result is preserved, the space
2273 allocated will be freed, as we know that it is no longer in use. */
2277 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2278 in question represents the outermost pair of curly braces (i.e. the "body
2279 block") of a function or method.
2281 For any BLOCK node representing a "body block" of a function or method, the
2282 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2283 represents the outermost (function) scope for the function or method (i.e.
2284 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2285 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2288 is_body_block (tree stmt)
/* Some front ends have no notion of body blocks at all.  */
2290 if (lang_hooks.no_body_blocks)
2293 if (TREE_CODE (stmt) == BLOCK)
2295 tree parent = BLOCK_SUPERCONTEXT (stmt);
2297 if (parent && TREE_CODE (parent) == BLOCK)
2299 tree grandparent = BLOCK_SUPERCONTEXT (parent);
/* BLOCK -> BLOCK -> FUNCTION_DECL is exactly the body-block shape.  */
2301 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2309 /* Return an opaque pointer to the current nesting level, so frontend code
2310 can check its own sanity. */
2313 current_nesting_level (void)
/* Outside any function there is no nesting to report.  */
2315 return cfun ? block_stack : 0;
2318 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2321 expand_nl_goto_receiver (void)
2323 /* Clobber the FP when we get here, so we have to make sure it's
2324 marked as used by this function. */
2325 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2327 /* Mark the static chain as clobbered here so life information
2328 doesn't get messed up for it. */
2329 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2331 #ifdef HAVE_nonlocal_goto
2332 if (! HAVE_nonlocal_goto)
2334 /* First adjust our frame pointer to its actual value. It was
2335 previously set to the start of the virtual area corresponding to
2336 the stacked variables when we branched here and now needs to be
2337 adjusted to the actual hardware fp value.
2339 Assignments to virtual registers are converted by
2340 instantiate_virtual_regs into the corresponding assignment
2341 to the underlying register (fp in this case) that makes
2342 the original assignment true.
2343 So the following insn will actually be
2344 decrementing fp by STARTING_FRAME_OFFSET. */
2345 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2347 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2348 if (fixed_regs[ARG_POINTER_REGNUM])
2350 #ifdef ELIMINABLE_REGS
2351 /* If the argument pointer can be eliminated in favor of the
2352 frame pointer, we don't need to restore it. We assume here
2353 that if such an elimination is present, it can always be used.
2354 This is the case on all known machines; if we don't make this
2355 assumption, we do unnecessary saving on many machines. */
2356 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2359 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2360 if (elim_regs[i].from == ARG_POINTER_REGNUM
2361 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* No arg-pointer elimination found: restore it explicitly.  */
2364 if (i == ARRAY_SIZE (elim_regs))
2367 /* Now restore our arg pointer from the address at which it
2368 was saved in our stack frame. */
2369 emit_move_insn (virtual_incoming_args_rtx,
2370 copy_to_reg (get_arg_pointer_save_area (cfun)));
2375 #ifdef HAVE_nonlocal_goto_receiver
/* Let the target emit any machine-specific receiver code.  */
2376 if (HAVE_nonlocal_goto_receiver)
2377 emit_insn (gen_nonlocal_goto_receiver ());
2380 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2381 insn, but we must not allow the code we just generated to be reordered
2382 by scheduling. Specifically, the update of the frame pointer must
2383 happen immediately, not later. So emit an ASM_INPUT to act as blockage
2385 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
/* Warn about any unused VARS (which may contain nodes other than
   VAR_DECLs, but such nodes are ignored).  The nodes are connected
   via the TREE_CHAIN field.  */

warn_about_unused_variables (tree vars)

  /* Only diagnose when -Wunused-variable is in effect.  */
  if (warn_unused_variable)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      /* Diagnose only real, named, user-written variables that were
	 never referenced; system-header and compiler-generated
	 (artificial) decls are exempt.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && ! TREE_USED (decl)
	  && ! DECL_IN_SYSTEM_HEADER (decl)
	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	warning ("%Junused variable '%D'", decl, decl);
/* Generate RTL code to terminate a binding contour.

   VARS is the chain of VAR_DECL nodes for the variables bound in this
   contour.  There may actually be other nodes in this chain, but any
   nodes other than VAR_DECLS are ignored.

   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is positive if it is not valid to jump into this contour,
   zero if we can jump into this contour only if it does not have a saved
   stack level, and negative if we are not to check for invalid use of
   labels (because the front end does that).  */

expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
		     int dont_jump_in ATTRIBUTE_UNUSED)

  struct nesting *thisblock = block_stack;

  /* If any of the variables in this scope were not used, warn the
     user now.  */
  warn_about_unused_variables (vars);

  if (thisblock->exit_label)
    /* Flush any pending stack adjustment first so every path reaches
       the exit label with a consistent stack.  */
    do_pending_stack_adjust ();
    emit_label (thisblock->exit_label);

  /* Mark the beginning and end of the scope if requested.  */

  /* Get rid of the beginning-mark if we don't make an end-mark.  */
  NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;

  /* Restore block_stack level for containing block.  */
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)

   Chooses among four representations: an error placeholder MEM, a
   register-held address for incomplete arrays, a pseudo register, a
   fixed stack slot, or dynamically allocated stack space.  */

expand_decl (tree decl)

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    /* Error already reported; use a harmless placeholder so later
       references don't crash.  */
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

  else if (use_register_for_decl (decl))
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));

      maybe_set_unchanging (DECL_RTL (decl), decl);

  /* Fixed-size stack slot, unless -fstack-check limits require the
     dynamic path for very large objects.  */
  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
      /* Variable of fixed size that goes on the stack.  */

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	  if (!MEM_P (DECL_RTL (decl))
	      || !REG_P (XEXP (DECL_RTL (decl), 0)))
	  oldaddr = XEXP (DECL_RTL (decl), 0);

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

	  /* Point the previously-created address register at the new
	     slot.  */
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);

    /* Dynamic-size object: must push space on the stack.  */
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the object.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

  /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
  DECL_ALIGN (decl) = STACK_BOUNDARY;
  DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
  DECL_USER_ALIGN (decl) = 0;
/* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC.
   ALLOC is expected to be an ADDR_EXPR whose operand is a VAR_DECL;
   the allocated address is stored into that variable's address
   register.  */

expand_stack_alloc (tree alloc, tree t_size)

  rtx address, dest, size;

  /* NOTE(review): the fall-through actions for these checks are not
     visible in this excerpt; presumably they abort.  */
  if (TREE_CODE (alloc) != ADDR_EXPR)
  var = TREE_OPERAND (alloc, 0);
  if (TREE_CODE (var) != VAR_DECL)

  type = TREE_TYPE (var);

  /* Compute the variable's size, in bytes.  */
  size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);

  /* Allocate space on the stack for the variable.  */
  address = XEXP (DECL_RTL (var), 0);
  dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
  if (dest != address)
    emit_move_insn (address, dest);

  /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
  DECL_ALIGN (var) = STACK_BOUNDARY;
  DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
  DECL_USER_ALIGN (var) = 0;
/* Emit code to save the current value of stack.
   Presumably returns the rtx holding the saved stack pointer (the
   return statement is not visible in this excerpt — confirm).  */

expand_stack_save (void)

  /* Settle any pending adjustment so we save the true stack pointer.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
/* Emit code to restore the current value of stack.
   VAR is the variable whose DECL_RTL holds the previously saved
   stack pointer (as produced by expand_stack_save).  */

expand_stack_restore (tree var)

  rtx sa = DECL_RTL (var);

  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
/* Emit code to perform the initialization of a declaration DECL.  */

expand_decl_init (tree decl)

  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
     for static decls.  */
  if (TREE_CODE (decl) == CONST_DECL
      || TREE_STATIC (decl))

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
      /* The initializer was erroneous: store zero for scalar-like
	 types to avoid cascading problems downstream.  */
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
      emit_line_note (DECL_SOURCE_LOCATION (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0);

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
	TREE_ADDRESSABLE (decl) = 1;

  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* If any of the elements is used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
	 instead create a new MEM rtx with the proper mode.  */
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));

	  /* Union lives in a register: share it or take a lowpart.  */
	  if (mode == GET_MODE (x))
	    SET_DECL_RTL (decl_elt, x);
	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.

   PRINTNAME is the construct's name for diagnostics ("switch", etc.).  */

expand_start_case (int exit_flag, tree expr, tree type,
		   const char *printname)

  struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->desc = CASE_NESTING;
  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (!NOTE_P (get_last_insn ()))
    emit_note (NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();
/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   Return 0 on success.

   Extended to handle range statements.  */

pushcase (tree value, tree (*converter) (tree, tree), tree label,

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
    value = (*converter) (nominal_type, value);

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
      && (TREE_CONSTANT_OVERFLOW (value)
	  || ! int_fits_type_p (value, index_type)))

  /* A single value is a degenerate range: low == high == VALUE.  */
  return add_case_node (value, value, label, duplicate, false);
/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
		tree label, tree *duplicate)

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
    value1 = TYPE_MIN_VALUE (index_type);
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 != 0 && tree_int_cst_lt (value2, value1))

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
    value2 = TYPE_MAX_VALUE (nominal_type);

  value1 = (*converter) (nominal_type, value1);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))

  return add_case_node (value1, value2, label, duplicate, false);
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.

   LOW/HIGH are the (inclusive) bounds; LABEL is the target LABEL_DECL;
   on a duplicate the offending node is stored in *DUPLICATE.  When
   DONT_EXPAND_LABEL is true the label rtx is not emitted here.  */

add_case_node (tree low, tree high, tree label, tree *duplicate,
	       bool dont_expand_label)

  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */

  /* Handle default labels specially.  */
      if (case_stack->data.case_stmt.default_label != 0)
	  /* A second default label is a duplicate.  */
	  *duplicate = case_stack->data.case_stmt.default_label;
      case_stack->data.case_stmt.default_label = label;
      if (!dont_expand_label)
        expand_label (label);

  /* Descend the AVL tree looking for an overlap or the insertion
     point.  */
  q = &case_stack->data.case_stmt.case_list;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;

  /* Add this label to the chain, and succeed.  */

  r = ggc_alloc (sizeof (struct case_node));

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))

  r->code_label = label;
  if (!dont_expand_label)
    expand_label (label);

  /* Rebalance the AVL tree on the way back up.  The two symmetric
     halves below handle growth of the left and right subtrees; each
     has the classic single- and double-rotation cases.  */
      struct case_node *s;

      if (! (b = p->balance))
	/* Growth propagation from left side.  */

	      if ((p->left = s = r->right))

	      if ((r->parent = s))

		case_stack->data.case_stmt.case_list = r;

	  /* r->balance == +1 */

	      struct case_node *t = r->right;

	      if ((p->left = s = t->right))
	      if ((r->right = s = t->left))

	      if ((t->parent = s))

		case_stack->data.case_stmt.case_list = t;

      /* p->balance == +1; growth of left side balances the node.  */

      if (! (b = p->balance))
	/* Growth propagation from right side.  */

	      if ((p->right = s = r->left))

	      if ((r->parent = s))

		case_stack->data.case_stmt.case_list = r;

	  /* r->balance == -1 */

	      struct case_node *t = r->left;

	      if ((p->right = s = t->left))
	      if ((r->left = s = t->right))

	      if ((t->parent = s))

		case_stack->data.case_stmt.case_list = t;

      /* p->balance == -1; growth of right side balances the node.  */
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3;
   a target may pre-define CASE_USE_BIT_TESTS to override this.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)

/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
/* Determine whether "1 << x" is relatively cheap in word_mode.  */

bool lshift_cheap_p (void)

  /* The answer is target-dependent but fixed for the whole
     compilation, so cache it across calls.  */
  static bool init = false;
  static bool cheap = true;

      /* Regno 10000 — presumably chosen to be a pseudo rather than a
	 hard register, so we cost a variable-amount shift; confirm.  */
      rtx reg = gen_rtx_REG (word_mode, 10000);
      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
      /* "Cheap" means strictly less than three insns' worth.  */
      cheap = cost < COSTS_N_INSNS (3);
/* Comparison function for qsort to order bit tests by decreasing
   number of case nodes, i.e. the node with the most cases gets
   tested first.  */

case_bit_test_cmp (const void *p1, const void *p2)

  const struct case_bit_test *d1 = p1;
  const struct case_bit_test *d2 = p2;

  /* Descending order on BITS; the subtraction cannot overflow since
     BITS is a small per-test case count.  */
  return d2->bits - d1->bits;
3220 /* Expand a switch statement by a short sequence of bit-wise
3221 comparisons. "switch(x)" is effectively converted into
3222 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
3225 INDEX_EXPR is the value being switched on, which is of
3226 type INDEX_TYPE. MINVAL is the lowest case value of in
3227 the case nodes, of INDEX_TYPE type, and RANGE is highest
3228 value minus MINVAL, also of type INDEX_TYPE. NODES is
3229 the set of case nodes, and DEFAULT_LABEL is the label to
3230 branch to should none of the cases match.
3232 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
3236 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
3237 tree range, case_node_ptr nodes, rtx default_label)
3239 struct case_bit_test test[MAX_CASE_BIT_TESTS];
3240 enum machine_mode mode;
3241 rtx expr, index, label;
3242 unsigned int i,j,lo,hi;
3243 struct case_node *n;
3247 for (n = nodes; n; n = n->right)
3249 label = label_rtx (n->code_label);
3250 for (i = 0; i < count; i++)
3251 if (same_case_target_p (label, test[i].label))
3256 if (count >= MAX_CASE_BIT_TESTS)
3260 test[i].label = label;
3267 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3268 n->low, minval)), 1);
3269 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3270 n->high, minval)), 1);
3271 for (j = lo; j <= hi; j++)
3272 if (j >= HOST_BITS_PER_WIDE_INT)
3273 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
3275 test[i].lo |= (HOST_WIDE_INT) 1 << j;
3278 qsort (test, count, sizeof(*test), case_bit_test_cmp);
3280 index_expr = fold (build (MINUS_EXPR, index_type,
3281 convert (index_type, index_expr),
3282 convert (index_type, minval)));
3283 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3285 index = protect_from_queue (index, 0);
3286 do_pending_stack_adjust ();
3288 mode = TYPE_MODE (index_type);
3289 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
3290 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
3293 index = convert_to_mode (word_mode, index, 0);
3294 index = expand_binop (word_mode, ashl_optab, const1_rtx,
3295 index, NULL_RTX, 1, OPTAB_WIDEN);
3297 for (i = 0; i < count; i++)
3299 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
3300 expr = expand_binop (word_mode, and_optab, index, expr,
3301 NULL_RTX, 1, OPTAB_WIDEN);
3302 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
3303 word_mode, 1, test[i].label);
3306 emit_jump (default_label);
#define HAVE_casesi 0

#ifndef HAVE_tablejump
#define HAVE_tablejump 0

/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   Picks one of four strategies: bit tests, a binary decision tree,
   a casesi insn, or a jump table.  */

expand_end_case_type (tree orig_index, tree orig_type)

  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n, *m;
  unsigned int count, uniq;
  rtx before_case, end, lab;
  struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  bool exit_done = false;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)

  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TYPE_UNSIGNED (index_type);
  if (orig_type == NULL)
    orig_type = TREE_TYPE (orig_index);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  /* Share the exit label if possible.  */
	  if (thiscase->exit_label)
	      SET_DECL_RTL (thiscase->data.case_stmt.default_label,
			    thiscase->exit_label);
	  expand_label (thiscase->data.case_stmt.default_label);
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* Flatten the AVL tree into a right-linked list so the passes
	 below can walk it linearly.  */
      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);
      strip_default_case_nodes (&thiscase->data.case_stmt.case_list,

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	  if (TREE_CODE (n->high) != INTEGER_CST)

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	      if (INT_CST_LT (n->low, minval))
	      if (INT_CST_LT (maxval, n->high))
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))

	  /* Count the number of unique case node targets.  */
	  lab = label_rtx (n->code_label);
	  for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
	    if (same_case_target_p (label_rtx (m->code_label), lab))

      /* Compute span of values.  */
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

	  /* No cases at all: evaluate the index for side effects and
	     jump straight to the default.  */
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_jump (default_label);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      else if (CASE_USE_BIT_TESTS
	       && ! TREE_CONSTANT (index_expr)
	       && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	       && compare_tree_int (range, 0) > 0
	       && lshift_cheap_p ()
	       && ((uniq == 1 && count >= 3)
		   || (uniq == 2 && count >= 5)
		   || (uniq == 3 && count >= 6)))
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	      minval = integer_zero_node;
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       thiscase->data.case_stmt.case_list,

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		    index = convert_to_mode (wider_mode, index, unsignedp);

	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))

		emit_jump (label_rtx (n->code_label));
		emit_jump (default_label);

	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);

	  /* Dispatch-table strategy: try a casesi insn first, then a
	     tablejump, then fall back to emitting the table inline.  */
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	      index_type = thiscase->data.case_stmt.nominal_type;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		  minval = integer_zero_node;

	      if (! try_tablejump (index_type, index_expr, minval, range,
				   table_label, default_label))

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->low, minval)), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->high, minval)), 1);

	      for (i = i_low; i <= i_high; i ++)
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);

      /* Move the generated dispatch code to before the switch body,
	 where expand_start_case recorded the starting note.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      if (squeeze_notes (&before_case, &end))
      reorder_insns (before_case, end,
		     thiscase->data.case_stmt.start);

  if (thiscase->exit_label && !exit_done)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);
/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (struct case_node *node, struct case_node *right)

  struct case_node *left;

    /* Flatten the right subtree first; it becomes NODE's tail.  */
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    /* Tail-recurse on the left subtree with NODE as its tail.  */
    return case_tree2list (left, node);
3677 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3680 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3682 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3688 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3689 (GET_MODE (op1) == VOIDmode
3690 ? GET_MODE (op2) : GET_MODE (op1)),
3694 /* Not all case values are encountered equally. This function
3695 uses a heuristic to weight case labels, in cases where that
3696 looks like a reasonable thing to do.
3698 Right now, all we try to guess is text, and we establish the
3701 chars above space: 16
3710 If we find any cases in the switch that are not either -1 or in the range
3711 of valid ASCII characters, or are control characters other than those
3712 commonly used with "\", don't treat this switch scanning text.
3714 Return 1 if these nodes are suitable for cost estimation, otherwise
3718 estimate_case_costs (case_node_ptr node)
3720 tree min_ascii = integer_minus_one_node;
3721 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3725 /* If we haven't already made the cost table, make it now. Note that the
3726 lower bound of the table is -1, not zero. */
3728 if (! cost_table_initialized)
3730 cost_table_initialized = 1;
3732 for (i = 0; i < 128; i++)
3735 COST_TABLE (i) = 16;
3736 else if (ISPUNCT (i))
3738 else if (ISCNTRL (i))
3739 COST_TABLE (i) = -1;
3742 COST_TABLE (' ') = 8;
3743 COST_TABLE ('\t') = 4;
3744 COST_TABLE ('\0') = 4;
3745 COST_TABLE ('\n') = 2;
3746 COST_TABLE ('\f') = 1;
3747 COST_TABLE ('\v') = 1;
3748 COST_TABLE ('\b') = 1;
3751 /* See if all the case expressions look like text. It is text if the
3752 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3753 as signed arithmetic since we don't want to ever access cost_table with a
3754 value less than -1. Also check that none of the constants in a range
3755 are strange control characters. */
3757 for (n = node; n; n = n->right)
3759 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3762 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3763 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3764 if (COST_TABLE (i) < 0)
3768 /* All interesting values are within the range of interesting
3769 ASCII characters. */
3773 /* Determine whether two case labels branch to the same target. */
3776 same_case_target_p (rtx l1, rtx l2)
3784 i1 = next_real_insn (l1);
3785 i2 = next_real_insn (l2);
3789 if (i1 && simplejump_p (i1))
3791 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
3794 if (i2 && simplejump_p (i2))
3796 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
3799 /* When coming from gimple, we usually won't have emitted either
3800 the labels or the body of the switch statement. The job being
3801 done here should be done via jump threading at the tree level.
3802 Cases that go the same place should have the same label. */
3806 /* Delete nodes that branch to the default label from a list of
3807 case nodes. Eg. case 5: default: becomes just default: */
3810 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
3817 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
3824 /* Scan an ordered list of case nodes
3825 combining those with consecutive values or ranges.
3827 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
3830 group_case_nodes (case_node_ptr head)
3832 case_node_ptr node = head;
3837 case_node_ptr np = node;
3839 lab = label_rtx (node->code_label);
3841 /* Try to group the successors of NODE with NODE. */
3842 while (((np = np->right) != 0)
3843 /* Do they jump to the same place? */
3844 && same_case_target_p (label_rtx (np->code_label), lab)
3845 /* Are their ranges consecutive? */
3846 && tree_int_cst_equal (np->low,
3847 fold (build (PLUS_EXPR,
3848 TREE_TYPE (node->high),
3851 /* An overflow is not consecutive. */
3852 && tree_int_cst_lt (node->high,
3853 fold (build (PLUS_EXPR,
3854 TREE_TYPE (node->high),
3856 integer_one_node))))
3858 node->high = np->high;
3860 /* NP is the first node after NODE which can't be grouped with it.
3861 Delete the nodes in between, and move on to that node. */
3867 /* Take an ordered list of case nodes
3868 and transform them into a near optimal binary tree,
3869 on the assumption that any target code selection value is as
3870 likely as any other.
3872 The transformation is performed by splitting the ordered
3873 list into two equal sections plus a pivot. The parts are
3874 then attached to the pivot as left and right branches. Each
3875 branch is then transformed recursively. */
3878 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3891 /* Count the number of entries on branch. Also count the ranges. */
3895 if (!tree_int_cst_equal (np->low, np->high))
3899 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3903 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3911 /* Split this list if it is long enough for that to help. */
3916 /* Find the place in the list that bisects the list's total cost,
3917 Here I gets half the total cost. */
3922 /* Skip nodes while their cost does not reach that amount. */
3923 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3924 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3925 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3928 npp = &(*npp)->right;
3933 /* Leave this branch lopsided, but optimize left-hand
3934 side and fill in `parent' fields for right-hand side. */
3936 np->parent = parent;
3937 balance_case_nodes (&np->left, np);
3938 for (; np->right; np = np->right)
3939 np->right->parent = np;
3943 /* If there are just three nodes, split at the middle one. */
3945 npp = &(*npp)->right;
3948 /* Find the place in the list that bisects the list's total cost,
3949 where ranges count as 2.
3950 Here I gets half the total cost. */
3951 i = (i + ranges + 1) / 2;
3954 /* Skip nodes while their cost does not reach that amount. */
3955 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3960 npp = &(*npp)->right;
3965 np->parent = parent;
3968 /* Optimize each of the two split parts. */
3969 balance_case_nodes (&np->left, np);
3970 balance_case_nodes (&np->right, np);
3974 /* Else leave this branch as one level,
3975 but fill in `parent' fields. */
3977 np->parent = parent;
3978 for (; np->right; np = np->right)
3979 np->right->parent = np;
3984 /* Search the parent sections of the case node tree
3985 to see if a test for the lower bound of NODE would be redundant.
3986 INDEX_TYPE is the type of the index expression.
3988 The instructions to generate the case decision tree are
3989 output in the same order as nodes are processed so it is
3990 known that if a parent node checks the range of the current
3991 node minus one that the current node is bounded at its lower
3992 span. Thus the test would be redundant. */
3995 node_has_low_bound (case_node_ptr node, tree index_type)
3998 case_node_ptr pnode;
4000 /* If the lower bound of this node is the lowest value in the index type,
4001 we need not test it. */
4003 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4006 /* If this node has a left branch, the value at the left must be less
4007 than that at this node, so it cannot be bounded at the bottom and
4008 we need not bother testing any further. */
4013 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4014 node->low, integer_one_node));
4016 /* If the subtraction above overflowed, we can't verify anything.
4017 Otherwise, look for a parent that tests our value - 1. */
4019 if (! tree_int_cst_lt (low_minus_one, node->low))
4022 for (pnode = node->parent; pnode; pnode = pnode->parent)
4023 if (tree_int_cst_equal (low_minus_one, pnode->high))
4029 /* Search the parent sections of the case node tree
4030 to see if a test for the upper bound of NODE would be redundant.
4031 INDEX_TYPE is the type of the index expression.
4033 The instructions to generate the case decision tree are
4034 output in the same order as nodes are processed so it is
4035 known that if a parent node checks the range of the current
4036 node plus one that the current node is bounded at its upper
4037 span. Thus the test would be redundant. */
4040 node_has_high_bound (case_node_ptr node, tree index_type)
4043 case_node_ptr pnode;
4045 /* If there is no upper bound, obviously no test is needed. */
4047 if (TYPE_MAX_VALUE (index_type) == NULL)
4050 /* If the upper bound of this node is the highest value in the type
4051 of the index expression, we need not test against it. */
4053 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4056 /* If this node has a right branch, the value at the right must be greater
4057 than that at this node, so it cannot be bounded at the top and
4058 we need not bother testing any further. */
4063 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4064 node->high, integer_one_node));
4066 /* If the addition above overflowed, we can't verify anything.
4067 Otherwise, look for a parent that tests our value + 1. */
4069 if (! tree_int_cst_lt (node->high, high_plus_one))
4072 for (pnode = node->parent; pnode; pnode = pnode->parent)
4073 if (tree_int_cst_equal (high_plus_one, pnode->low))
4079 /* Search the parent sections of the
4080 case node tree to see if both tests for the upper and lower
4081 bounds of NODE would be redundant. */
4084 node_is_bounded (case_node_ptr node, tree index_type)
4086 return (node_has_low_bound (node, index_type)
4087 && node_has_high_bound (node, index_type));
4090 /* Emit an unconditional jump to LABEL unless it would be dead code. */
4093 emit_jump_if_reachable (rtx label)
4095 if (!BARRIER_P (get_last_insn ()))
4099 /* Emit step-by-step code to select a case for the value of INDEX.
4100 The thus generated decision tree follows the form of the
4101 case-node binary tree NODE, whose nodes represent test conditions.
4102 INDEX_TYPE is the type of the index of the switch.
4104 Care is taken to prune redundant tests from the decision tree
4105 by detecting any boundary conditions already checked by
4106 emitted rtx. (See node_has_high_bound, node_has_low_bound
4107 and node_is_bounded, above.)
4109 Where the test conditions can be shown to be redundant we emit
4110 an unconditional jump to the target code. As a further
4111 optimization, the subordinates of a tree node are examined to
4112 check for bounded nodes. In this case conditional and/or
4113 unconditional jumps as a result of the boundary check for the
4114 current node are arranged to target the subordinates associated
4115 code for out of bound conditions on the current node.
4117 We can assume that when control reaches the code generated here,
4118 the index value has already been compared with the parents
4119 of this node, and determined to be on the same side of each parent
4120 as this node is. Thus, if this node tests for the value 51,
4121 and a parent tested for 52, we don't need to consider
4122 the possibility of a value greater than 51. If another parent
4123 tests for the value 50, then this node need not test anything. */
4126 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
4129 /* If INDEX has an unsigned type, we must make unsigned branches. */
4130 int unsignedp = TYPE_UNSIGNED (index_type);
4131 enum machine_mode mode = GET_MODE (index);
4132 enum machine_mode imode = TYPE_MODE (index_type);
4134 /* See if our parents have already tested everything for us.
4135 If they have, emit an unconditional jump for this node. */
4136 if (node_is_bounded (node, index_type))
4137 emit_jump (label_rtx (node->code_label));
4139 else if (tree_int_cst_equal (node->low, node->high))
4141 /* Node is single valued. First see if the index expression matches
4142 this node and then check our children, if any. */
4144 do_jump_if_equal (index,
4145 convert_modes (mode, imode,
4146 expand_expr (node->low, NULL_RTX,
4149 label_rtx (node->code_label), unsignedp);
4151 if (node->right != 0 && node->left != 0)
4153 /* This node has children on both sides.
4154 Dispatch to one side or the other
4155 by comparing the index value with this node's value.
4156 If one subtree is bounded, check that one first,
4157 so we can avoid real branches in the tree. */
4159 if (node_is_bounded (node->right, index_type))
4161 emit_cmp_and_jump_insns (index,
4164 expand_expr (node->high, NULL_RTX,
4167 GT, NULL_RTX, mode, unsignedp,
4168 label_rtx (node->right->code_label));
4169 emit_case_nodes (index, node->left, default_label, index_type);
4172 else if (node_is_bounded (node->left, index_type))
4174 emit_cmp_and_jump_insns (index,