1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 We start with a vector of case nodes sorted in ascending order, and
70 the default label as the last element in the vector. Before expanding
71 to RTL, we transform this vector into a list linked via the RIGHT
72 fields in the case_node struct. Nodes with higher case values are
75 Switch statements can be output in three forms. A branch table is
76 used if there are more than a few labels and the labels are dense
77 within the range between the smallest and largest case value. If a
78 branch table is used, no further manipulations are done with the case
81 The alternative to the use of a branch table is to generate a series
82 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
83 and PARENT fields to hold a binary tree. Initially the tree is
84 totally unbalanced, with everything on the right. We balance the tree
85 with nodes on the left having lower case values than the parent
86 and nodes on the right having higher values. We then output the tree
89 For very small, suitable switch statements, we can generate a series
90 of simple bit test and branches instead. */
/* One entry of a switch statement's case list: a single case label, or
   (for a range label) a [LOW, HIGH] range.  Nodes are first chained
   through RIGHT and may later be rebalanced into a binary search tree
   via LEFT/RIGHT/PARENT; see the large comment above.  */
struct case_node GTY(())
/* NOTE(review): the opening brace of the struct is elided in this
   excerpt of the file.  */
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
/* NOTE(review): the closing brace of the struct is elided in this
   excerpt of the file.  */

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
/* These are used by estimate_case_costs and balance_case_nodes. */

/* This must be a signed type, and non-ANSI compilers lack signed char. */
static short cost_table_[129];
static int use_cost_table;	/* Nonzero when the table should be consulted
				   (set elsewhere; not in this excerpt).  */
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned, so the index is biased by 1 before the array lookup.  */
#define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
116 /* Stack of control and binding constructs we are currently inside.
118 These constructs begin when you call `expand_start_WHATEVER'
119 and end when you call `expand_end_WHATEVER'. This stack records
120 info about how the construct began that tells the end-function
121 what to do. It also may provide information about the construct
122 to alter the behavior of other constructs within the body.
123 For example, they may affect the behavior of C `break' and `continue'.
125 Each construct gets one `struct nesting' object.
126 All of these objects are chained through the `all' field.
127 `nesting_stack' points to the first object (innermost construct).
128 The position of an entry on `nesting_stack' is in its `depth' field.
130 Each type of construct has its own individual stack.
131 For example, loops have `cond_stack'. Each object points to the
132 next object of the same type through the `next' field.
134 Some constructs are visible to `break' exit-statements and others
135 are not. Which constructs are visible depends on the language.
136 Therefore, the data structure allows each construct to be visible
137 or not, according to the args given when the construct is started.
138 The construct is visible if the `exit_label' field is non-null.
139 In that case, the value should be a CODE_LABEL rtx. */
/* One element of the control/binding-construct stack described in the
   comment above.  The `data' union is discriminated by a `desc' field
   (referenced by the GTY desc markup below); several leading members,
   including the `all' chain and `depth' used by POPSTACK, are elided
   in this excerpt.  */
struct nesting GTY(())
/* NOTE(review): opening brace and leading members elided here.  */
  struct nesting *next;		/* Next construct of the same kind
				   (e.g. cond_stack chains conds).  */
    /* For conds (if-then and if-then-else statements). */
      /* Label for the end of the if construct.
	 There is none if EXITFLAG was not set
	 and no `else' has been seen yet. */
      /* Label for the end of this alternative.
	 This may be the end of the if or the next else/elseif. */
    } GTY ((tag ("COND_NESTING"))) cond;
    /* For variable binding contours. */
      /* Sequence number of this binding contour within the function,
	 in order of entry. */
      int block_start_count;
      /* The NOTE that starts this contour.
	 Used by expand_goto to check whether the destination
	 is within each contour or not. */
      /* The saved target_temp_slot_level from our outer block.
	 We may reset target_temp_slot_level to be the level of
	 this block, if that is done, target_temp_slot_level
	 reverts to the saved target_temp_slot_level at the very
	 end of the block.  */
      int block_target_temp_slot_level;
    } GTY ((tag ("BLOCK_NESTING"))) block;
    /* For switch (C) or case (Pascal) statements. */
      /* The insn after which the case dispatch should finally
	 be emitted. Zero for a dummy. */
      /* A list of case labels; it is first built as an AVL tree.
	 During expand_end_case, this is converted to a list, and may be
	 rearranged into a nearly balanced binary tree. */
      struct case_node *case_list;
      /* Label to jump to if no case matches. */
      /* The expression to be dispatched on. */
    } GTY ((tag ("CASE_NESTING"))) case_stmt;
  } GTY ((desc ("%1.desc"))) data;
/* NOTE(review): the closing `};' of struct nesting is elided in this
   excerpt.  */
/* Allocate and return a new `struct nesting'.  The object lives in
   garbage-collected storage (ggc); it is never explicitly freed.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
204 /* Pop the nesting stack element by element until we pop off
205 the element which is at the top of STACK.
206 Update all the other stacks, popping off elements from them
207 as we pop them from nesting_stack. */
/* Pop `nesting_stack' one element at a time, keeping the per-type
   stacks (cond_stack, block_stack, case_stack) and nesting_depth in
   sync, until the entry STACK itself has been popped.  */
#define POPSTACK(STACK)					\
do {							\
  struct nesting *stop_at = STACK;			\
  struct nesting *popped;				\
  do							\
    {							\
      popped = nesting_stack;				\
      if (cond_stack == popped)				\
	cond_stack = cond_stack->next;			\
      if (block_stack == popped)			\
	block_stack = block_stack->next;		\
      if (case_stack == popped)				\
	case_stack = case_stack->next;			\
      nesting_depth = popped->depth - 1;		\
      nesting_stack = popped->all;			\
    }							\
  while (popped != stop_at);				\
} while (0)
/* Per-function statement-expansion state.  One instance hangs off
   `cfun->stmt' (see init_stmt_for_function) and is reached through the
   accessor macros below.  */
struct stmt_status GTY(())
/* NOTE(review): the opening brace is elided in this excerpt.  */
  /* Chain of all pending binding contours. */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too. */

  /* Chain of all pending conditional statements. */
  struct nesting * x_cond_stack;

  /* Chain of all pending case or switch statements. */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field. */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now. */
  /* NOTE(review): the x_nesting_depth member itself is elided in this
     excerpt; see the nesting_depth accessor macro below.  */

  /* Number of binding contours started so far in this function. */
  int x_block_start_count;

  /* Location of last line-number note, whether we actually
     emitted it or not. */
  location_t x_emit_locus;
/* NOTE(review): the closing `};' is elided in this excerpt.  */
/* Accessors for the current function's stmt_status fields, so the rest
   of this file can use the short unprefixed names.  */
#define block_stack (cfun->stmt->x_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define emit_locus (cfun->stmt->x_emit_locus)
260 static int n_occurrences (int, const char *);
261 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
262 static void expand_nl_goto_receiver (void);
263 static bool check_operand_nalternatives (tree, tree);
264 static bool check_unique_operand_names (tree, tree);
265 static char *resolve_operand_name_1 (char *, tree, tree);
266 static void expand_null_return_1 (void);
267 static rtx shift_return_value (rtx);
268 static void expand_value_return (rtx);
269 static void do_jump_if_equal (rtx, rtx, rtx, int);
270 static int estimate_case_costs (case_node_ptr);
271 static bool same_case_target_p (rtx, rtx);
272 static bool lshift_cheap_p (void);
273 static int case_bit_test_cmp (const void *, const void *);
274 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
275 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
276 static int node_has_low_bound (case_node_ptr, tree);
277 static int node_has_high_bound (case_node_ptr, tree);
278 static int node_is_bounded (case_node_ptr, tree);
279 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
/* Allocate (zero-initialized, in GC storage) the per-function
   statement-expansion state for the current function.  */
init_stmt_for_function (void)
/* NOTE(review): the return-type line and braces are elided in this
   excerpt.  */
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
287 /* Record the current file and line. Called from emit_line_note. */
set_file_and_line_for_stmt (location_t location)
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it -- the guard that checks cfun->stmt is elided in this
     excerpt.  */
  emit_locus = location;
299 /* Emit a no-op instruction. */
  /* NOTE(review): the function header (presumably emit_nop) and the
     start of this condition are elided in this excerpt.  A real nop is
     only emitted when the last insn is a label, or a note with no real
     insn before it -- otherwise a nop would be redundant.  */
  last_insn = get_last_insn ();
      && (LABEL_P (last_insn)
	  || (NOTE_P (last_insn)
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
314 /* Return the rtx-label that corresponds to a LABEL_DECL,
315 creating it if necessary. */
label_rtx (tree label)
  /* Only LABEL_DECLs may be passed; the consequence of this check
     (presumably an abort) is elided in this excerpt.  */
  if (TREE_CODE (label) != LABEL_DECL)

  /* Lazily create the CODE_LABEL rtx the first time it is wanted.  */
  if (!DECL_RTL_SET_P (label))
      rtx r = gen_label_rtx ();
      SET_DECL_RTL (label, r);
      /* Targets of computed gotos and non-local labels must be kept
	 even if they appear otherwise unreferenced.  */
      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	LABEL_PRESERVE_P (r) = 1;

  return DECL_RTL (label);
334 /* As above, but also put it on the forced-reference list of the
335 function that contains it. */
force_label_rtx (tree label)
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);

  /* If the label belongs to an enclosing function, record the forced
     reference in that function's data, not the current one's.  The
     else-branch and the declaration of `p' are elided in this
     excerpt.  */
  if (function != current_function_decl)
    p = find_function_data (function);

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
356 /* Add an unconditional jump to LABEL as the next sequential instruction. */
emit_jump (rtx label)
  /* Flush any pending stack adjustment first so the jump cannot skip
     over it.  */
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
366 /* Emit code to jump to the address
367 specified by the pointer expression EXP. */
expand_computed_goto (tree exp)
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  /* The expanded pointer may not be in Pmode; normalize it before
     using it as a jump address.  */
  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
380 /* Handle goto statements and the labels that they can go to. */
382 /* Specify the location in the RTL code of a label LABEL,
383 which is a LABEL_DECL tree node.
385 This is used for the kind of label that the user can jump to with a
386 goto statement, and for alternatives of a switch or case statement.
387 RTL labels generated for loops and conditionals don't go through here;
388 they are generated directly at the RTL level, by other functions below.
390 Note that this has nothing to do with defining label *names*.
391 Languages vary in how they do that and what that even means. */
expand_label (tree label)
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  /* A non-local label may be jumped to from a nested function: emit
     the receiver code and register the label on the handler list.  */
  if (DECL_NONLOCAL (label))
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);

  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  /* Either property forces the label to survive; record its number.  */
  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
418 /* Generate RTL code for a `goto' statement with target label LABEL.
419 LABEL should be a LABEL_DECL tree node that was or will later be
420 defined with `expand_label'. */
expand_goto (tree label)
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
  /* NOTE(review): the abort on that condition and the matching #endif
     are elided in this excerpt.  */

  emit_jump (label_rtx (label));
436 /* Return the number of times character C occurs in string S. */
438 n_occurrences (int c, const char *s)
446 /* Generate RTL for an asm statement (explicit assembler code).
447 STRING is a STRING_CST node containing the assembler code text,
448 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
449 insn is volatile; don't optimize it. */
expand_asm (tree string, int vol)
  /* The front end may hand us the STRING_CST wrapped in an ADDR_EXPR;
     unwrap it.  */
  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  /* VOL nonzero marks the insn volatile so it is not optimized away.  */
  MEM_VOLATILE_P (body) = vol;
466 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
467 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
468 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
469 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
470 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
471 constraint allows the use of a register operand. And, *IS_INOUT
472 will be true if the operand is read-write, i.e., if it is used as
473 an input as well as an output. If *CONSTRAINT_P is not in
474 canonical form, it will be made canonical. (Note that `+' will be
475 replaced with `=' as part of this process.)
477 Returns TRUE if all went well; FALSE if an error occurred. */
/* Parse the output constraint *CONSTRAINT_P for output operand
   OPERAND_NUM; see the block comment above for the full contract.
   NOTE(review): many lines of this function are elided in this
   excerpt; do not assume the visible lines are contiguous.  */
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
  const char *constraint = *constraint_p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory until a letter below proves otherwise.  */

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error message.  */
      error ("output operand constraint lacks `='");

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  /* NOTE(review): this tests the pointer `is_inout', which is always
     nonzero; upstream GCC reads `*is_inout' here -- verify against the
     full source.  */
  if (p != constraint || is_inout)
      size_t c_len = strlen (constraint);

	warning ("output constraint `%c' for operand %d is not at the beginning",

      /* Make a copy of the constraint. */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'. */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.) */
      /* Replace the constraint with the canonicalized string. */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;

  /* Loop through the constraint string. */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
	error ("operand constraint contains incorrectly positioned '+' or '='");

	if (operand_num + 1 == ninputs + noutputs)
	    error ("`%%' constraint used with last operand");

      case 'V': case 'm': case 'o':

      case '?': case '!': case '*': case '&': case '#':
      case 'E': case 'F': case 'G': case 'H':
      case 's': case 'i': case 'n':
      case 'I': case 'J': case 'K': case 'L': case 'M':
      case 'N': case 'O': case 'P': case ',':

      case '0': case '1': case '2': case '3': case '4':
      case '5': case '6': case '7': case '8': case '9':
	error ("matching constraint not valid in output operand");

      /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	 excepting those that expand_call created.  So match memory
	 and hope.  */

	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  /* Otherwise we can't assume anything about the nature of
	     the constraint except that it isn't purely registers.
	     Treat it like "g" and hope for the best. */
610 /* Similar, but for input constraints. */
/* Similar to parse_output_constraint, but for input constraints.
   NOTE(review): many lines are elided in this excerpt; the visible
   lines are not contiguous.  */
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);

  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory. */

  /* Make sure constraint has neither `=', `+', nor '&'. */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      case '+': case '=': case '&':
	if (constraint == orig_constraint)
	    error ("input operand constraint contains `%c'", constraint[j]);

	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	    error ("`%%' constraint used with last operand");

      case 'V': case 'm': case 'o':

      case '?': case '!': case '*': case '#':
      case 'E': case 'F': case 'G': case 'H':
      case 's': case 'i': case 'n':
      case 'I': case 'J': case 'K': case 'L': case 'M':
      case 'N': case 'O': case 'P': case ',':

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory. */
      case '0': case '1': case '2': case '3': case '4':
      case '5': case '6': case '7': case '8': case '9':

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	      error ("matching constraint references invalid operand number");

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative. */
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);

	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first. */

	    j = end - constraint;
	  /* Anticipate increment at end of loop. */

	  if (! ISALPHA (constraint[j]))
	      error ("invalid punctuation `%c' in constraint", constraint[j]);

	  if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best. */

  /* A matching ("0".."9") constraint that never allowed a register is
     suspicious; warn about it.  */
  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");
746 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
747 if it is an operand which must be passed in memory (i.e. an "m"
748 constraint), false otherwise. */
/* INPUT is one of the input operands from EXPR, an ASM_EXPR.  Returns
   true iff the operand's constraint requires memory and does not allow
   a register (an "m"-style constraint).  */
asm_op_is_mem_input (tree input, tree expr)
  const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
  tree outputs = ASM_OUTPUTS (expr);
  int noutputs = list_length (outputs);
  const char **constraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));

  bool allows_mem, allows_reg;

  /* Collect output constraints: needed so a matching ("0".."9")
     input constraint can be resolved against them.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* We pass 0 for input_num, ninputs and ninout; they are only used for
     error checking which will be done at expand time. */
  parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
			  &allows_mem, &allows_reg);
  return (!allows_reg && allows_mem);
773 /* Check for overlap between registers marked in CLOBBERED_REGS and
774 anything inappropriate in DECL. Emit error and return TRUE for error,
/* Check DECL against CLOBBERED_REGS; emit an error and return true on
   conflict (see the comment above).  NOTE(review): the loop's middle
   condition and some braces are elided in this excerpt.  */
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed. */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
      rtx reg = DECL_RTL (decl);

      /* Walk every hard register the variable's mode occupies.  */
      for (regno = REGNO (reg);
	      + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
808 /* Generate RTL for an asm statement with arguments.
809 STRING is the instruction template.
810 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
811 Each output or input has an expression in the TREE_VALUE and
812 and a tree list in TREE_PURPOSE which in turn contains a constraint
813 name in TREE_VALUE (or NULL_TREE) and a constraint string
815 CLOBBERS is a list of STRING_CST nodes each naming a hard register
816 that is clobbered by this insn.
818 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
819 Some elements of OUTPUTS may be replaced with trees representing temporary
820 values. The caller should copy those temporary values to the originally
823 VOL nonzero means the insn is volatile; don't optimize it. */
/* Generate RTL for an asm statement with operands; see the block
   comment above for the parameter contract.
   NOTE(review): a large number of lines (braces, declarations, parts
   of conditions) are elided throughout this excerpt -- the visible
   lines are NOT contiguous and must not be read as complete logic.  */
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
  rtvec argvec, constraintvec;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;

  /* Vector of RTX's of evaluated output operands. */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now. */

  if (! check_operand_nalternatives (outputs, inputs))

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints: outputs first, then inputs.  */
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register. */
  clobbers = targetm.md_asm_clobbers (clobbers);

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later. */
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers. */

	  /* Clobbering the PIC register is an error */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	      error ("PIC register `%s' clobbered in `asm'", regname);

	  SET_HARD_REG_BIT (clobbered_regs, i);

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed. */

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;

      /* If there's an erroneous arg, emit no insn. */
      if (type == error_mark_node)

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further. */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))

	      && REG_P (DECL_RTL (val))
	      && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	lang_hooks.mark_addressable (val);

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload. */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))

      if (! allows_reg && allows_mem)
	lang_hooks.mark_addressable (TREE_VALUE (tail));

  /* Second pass evaluates arguments. */

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables. */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	  && (allows_mem || REG_P (DECL_RTL (val)))
	  && ! (REG_P (DECL_RTL (val))
		&& GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))

	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	      real_output_rtx[i] = op;
	      op = gen_reg_rtx (GET_MODE (op));
		emit_move_insn (op, real_output_rtx[i]);

	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);

      generating_concat_p = old_generating_concat_p;

	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands. */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       TREE_STRING_POINTER (string),
			       empty_string, 0, argvec, constraintvec,

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
      bool allows_reg, allows_mem;
      const char *constraint;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      op = expand_expr (val, NULL_RTX, VOIDmode,
			(allows_mem && !allows_reg
			 ? EXPAND_MEMORY : EXPAND_NORMAL));

      /* Never pass a CONCAT to an ASM. */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint) <= 0)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints",
	  else if (MEM_P (op))
	      /* We won't recognize either volatile memory or memory
		 with a queued address as available a memory_operand
		 at this point.  Ignore it: clearly this *is* a memory. */

	      warning ("use of memory input without lvalue in "
		       "asm operand %d is deprecated", i + noutputs);

	      if (CONSTANT_P (op))
		  rtx mem = force_const_mem (TYPE_MODE (type), op);
		    op = validize_mem (mem);
		    op = force_reg (TYPE_MODE (type), op);

		       || GET_CODE (op) == SUBREG
		       || GET_CODE (op) == CONCAT)
		  tree qual_type = build_qualified_type (type,
							 | TYPE_QUAL_CONST));
		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
		  memloc = validize_mem (memloc);
		  emit_move_insn (memloc, op);

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
	clobber_conflict_found = 1;

  /* Protect all the operands from the queue now that they have all been
     evaluated. */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx. */
  for (i = 0; i < ninout; i++)
      int j = inout_opnum[i];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));

  generating_concat_p = old_generating_concat_p;

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		  ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL. */

  if (noutputs == 1 && nclobbers == 0)
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));

  else if (noutputs == 0 && nclobbers == 0)
      /* No output operands: put in a raw ASM_OPERANDS rtx. */

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET. */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    TREE_STRING_POINTER (string),
			    constraints[i], i, argvec, constraintvec,

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL. */

	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified. */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	      if (j == -3)	/* `cc', which is not a register */

	      if (j == -4)	/* `memory', don't cache memory across asm */
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_SCRATCH (VOIDmode)));

	      /* Ignore unknown register, error already signaled. */

	  /* Use QImode since that's guaranteed to clobber just one reg. */
	  clobbered_reg = gen_rtx_REG (QImode, j);

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above. */
	  if (!clobber_conflict_found)
	      /* We test the old body (obody) contents to avoid tripping
		 over the under-construction body. */
	      for (opno = 0; opno < noutputs; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
		  internal_error ("asm clobber conflict with output operand");

	      for (opno = 0; opno < ninputs - ninout; opno++)
		if (reg_overlap_mentioned_p (clobbered_reg,
					     ASM_OPERANDS_INPUT (obody, opno)))
		  internal_error ("asm clobber conflict with input operand");

	  XVECEXP (body, 0, i++)
	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong. */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);
/* Expand an ASM_EXPR node EXP into RTL.  A bare asm (ASM_INPUT_P) is
   emitted directly; an asm with operands goes through
   expand_asm_operands, after which the output TREE_VALUEs are restored
   so the same tree can be expanded again later (e.g. for inlining).  */
1260 expand_asm_expr (tree exp)
1266 if (ASM_INPUT_P (exp))
/* Simple asm with no operand lists: just emit the template string.  */
1268 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1272 outputs = ASM_OUTPUTS (exp);
1273 noutputs = list_length (outputs);
1274 /* o[I] is the place that output number I should be written. */
1275 o = (tree *) alloca (noutputs * sizeof (tree));
1277 /* Record the contents of OUTPUTS before it is modified. */
1278 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1279 o[i] = TREE_VALUE (tail);
1281 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1282 OUTPUTS some trees for where the values were actually stored. */
1283 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1284 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1287 /* Copy all the intermediate outputs into the specified outputs. */
1288 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
/* Only emit a copy when expand_asm_operands substituted a temporary.  */
1290 if (o[i] != TREE_VALUE (tail))
1292 expand_assignment (o[i], TREE_VALUE (tail), 0);
1295 /* Restore the original value so that it's correct the next
1296 time we expand this function. */
1297 TREE_VALUE (tail) = o[i];
1302 /* A subroutine of expand_asm_operands. Check that all operands have
1303 the same number of alternatives. Return true if so. */
1306 check_operand_nalternatives (tree outputs, tree inputs)
1308 if (outputs || inputs)
/* Commas separate alternatives in a constraint string, so the first
   operand's comma count fixes the expected count for all the rest.  */
1310 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1312 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1315 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1317 error ("too many alternatives in `asm'");
/* Walk every operand and verify its constraint string agrees.  */
1324 const char *constraint
1325 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1327 if (n_occurrences (',', constraint) != nalternatives)
1329 error ("operand constraints for `asm' differ in number of alternatives");
/* When the outputs list runs out, continue iterating over the inputs.  */
1333 if (TREE_CHAIN (tmp))
1334 tmp = TREE_CHAIN (tmp);
1336 tmp = next, next = 0;
1343 /* A subroutine of expand_asm_operands. Check that all operand names
1344 are unique. Return true if so. We rely on the fact that these names
1345 are identifiers, and so have been canonicalized by get_identifier,
1346 so all we need are pointer comparisons. */
1349 check_unique_operand_names (tree outputs, tree inputs)
/* Each output name must differ from every LATER output name; earlier
   pairs were already checked on previous iterations.  */
1353 for (i = outputs; i ; i = TREE_CHAIN (i))
1355 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1359 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1360 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Each input name must differ from every later input name AND from
   every output name (the two namespaces are shared).  */
1364 for (i = inputs; i ; i = TREE_CHAIN (i))
1366 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1370 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1371 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1373 for (j = outputs; j ; j = TREE_CHAIN (j))
1374 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1381 error ("duplicate asm operand name '%s'",
1382 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1386 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1387 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1388 STRING and in the constraints to those numbers. */
1391 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1398 check_unique_operand_names (outputs, inputs);
1400 /* Substitute [<name>] in input constraint strings. There should be no
1401 named operands in output constraints. */
1402 for (t = inputs; t ; t = TREE_CHAIN (t))
1404 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1405 if (strchr (c, '[') != NULL)
/* Work on a writable copy; resolve_operand_name_1 edits in place.  */
1407 p = buffer = xstrdup (c);
1408 while ((p = strchr (p, '[')) != NULL)
1409 p = resolve_operand_name_1 (p, outputs, inputs);
1410 TREE_VALUE (TREE_PURPOSE (t))
1411 = build_string (strlen (buffer), buffer);
1416 /* Now check for any needed substitutions in the template. */
1417 c = TREE_STRING_POINTER (string);
1418 while ((c = strchr (c, '%')) != NULL)
/* %[name] and e.g. %l[name] are the forms that need resolving.  */
1422 else if (ISALPHA (c[1]) && c[2] == '[')
1433 /* OK, we need to make a copy so we can perform the substitutions.
1434 Assume that we will not need extra space--we get to remove '['
1435 and ']', which means we cannot have a problem until we have more
1436 than 999 operands. */
1437 buffer = xstrdup (TREE_STRING_POINTER (string));
1438 p = buffer + (c - TREE_STRING_POINTER (string));
1440 while ((p = strchr (p, '%')) != NULL)
1444 else if (ISALPHA (p[1]) && p[2] == '[')
1452 p = resolve_operand_name_1 (p, outputs, inputs);
1455 string = build_string (strlen (buffer), buffer);
1462 /* A subroutine of resolve_operand_names. P points to the '[' for a
1463 potential named operand of the form [<name>]. In place, replace
1464 the name and brackets with a number. Return a pointer to the
1465 balance of the string after substitution. */
1468 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1475 /* Collect the operand name. */
1476 q = strchr (p, ']');
1479 error ("missing close brace for named operand");
1480 return strchr (p, '\0');
1484 /* Resolve the name to a number. */
/* OP keeps counting across both loops: outputs are numbered first,
   then inputs continue from noutputs.  */
1485 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1487 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1490 const char *c = TREE_STRING_POINTER (name);
/* Match the full name between '[' and ']': prefix equal and the
   candidate ends exactly at LEN.  */
1491 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1495 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1497 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1500 const char *c = TREE_STRING_POINTER (name);
1501 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1507 error ("undefined named operand '%s'", p + 1);
1511 /* Replace the name with the number. Unfortunately, not all libraries
1512 get the return value of sprintf correct, so search for the end of the
1513 generated string by hand. */
1514 sprintf (p, "%d", op);
1515 p = strchr (p, '\0');
1517 /* Verify the no extra buffer space assumption. */
1521 /* Shift the rest of the buffer down to fill the gap. */
1522 memmove (p, q + 1, strlen (q + 1) + 1);
1527 /* Generate RTL to evaluate the expression EXP. */
1530 expand_expr_stmt (tree exp)
/* Expanded for side effects only, hence the const0_rtx target.  */
1535 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1536 type = TREE_TYPE (exp);
1538 /* If all we do is reference a volatile value in memory,
1539 copy it to a register to be sure it is actually touched. */
1540 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1542 if (TYPE_MODE (type) == VOIDmode)
1544 else if (TYPE_MODE (type) != BLKmode)
1545 value = copy_to_reg (value);
/* BLKmode values cannot be copied to a register; force the access
   with a self-comparison jump instead.  */
1548 rtx lab = gen_label_rtx ();
1550 /* Compare the value with itself to reference it. */
1551 emit_cmp_and_jump_insns (value, value, EQ,
1552 expand_expr (TYPE_SIZE (type),
1553 NULL_RTX, VOIDmode, 0),
1559 /* Free any temporaries used to evaluate this expression. */
1563 /* Warn if EXP contains any computations whose results are not used.
1564 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1565 (potential) location of the expression. */
1568 warn_if_unused_value (tree exp, location_t locus)
/* TREE_USED set means some part of this expression is used elsewhere;
   suppress the warning.  */
1571 if (TREE_USED (exp))
1574 /* Don't warn about void constructs. This includes casting to void,
1575 void function calls, and statement expressions with a final cast
1577 if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location over the caller-supplied one.  */
1580 if (EXPR_HAS_LOCATION (exp))
1581 locus = EXPR_LOCATION (exp);
1583 switch (TREE_CODE (exp))
1585 case PREINCREMENT_EXPR:
1586 case POSTINCREMENT_EXPR:
1587 case PREDECREMENT_EXPR:
1588 case POSTDECREMENT_EXPR:
1593 case TRY_CATCH_EXPR:
1594 case WITH_CLEANUP_EXPR:
1599 /* For a binding, warn if no side effect within it. */
1600 exp = BIND_EXPR_BODY (exp);
1604 exp = TREE_OPERAND (exp, 0);
1607 case TRUTH_ORIF_EXPR:
1608 case TRUTH_ANDIF_EXPR:
1609 /* In && or ||, warn if 2nd operand has no side effect. */
1610 exp = TREE_OPERAND (exp, 1);
1614 if (TREE_NO_WARNING (exp))
1616 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1618 /* Let people do `(foo (), 0)' without a warning. */
1619 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1621 exp = TREE_OPERAND (exp, 1);
1626 case NON_LVALUE_EXPR:
1627 /* Don't warn about conversions not explicit in the user's program. */
1628 if (TREE_NO_WARNING (exp))
1630 /* Assignment to a cast usually results in a cast of a modify.
1631 Don't complain about that. There can be an arbitrary number of
1632 casts before the modify, so we must loop until we find the first
1633 non-cast expression and then test to see if that is a modify. */
1635 tree tem = TREE_OPERAND (exp, 0);
1637 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1638 tem = TREE_OPERAND (tem, 0);
1640 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1641 || TREE_CODE (tem) == CALL_EXPR)
1647 /* Don't warn about automatic dereferencing of references, since
1648 the user cannot control it. */
1649 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1651 exp = TREE_OPERAND (exp, 0);
1657 /* Referencing a volatile value is a side effect, so don't warn. */
1659 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1660 && TREE_THIS_VOLATILE (exp))
1663 /* If this is an expression which has no operands, there is no value
1664 to be unused. There are no such language-independent codes,
1665 but front ends may define such. */
1666 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1667 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1671 /* If this is an expression with side effects, don't warn. */
1672 if (TREE_SIDE_EFFECTS (exp))
1675 warning ("%Hvalue computed is not used", &locus);
1680 /* Generate RTL for the start of an if-then. COND is the expression
1681 whose truth should be tested.
1683 If EXITFLAG is nonzero, this conditional is visible to
1684 `exit_something'. */
1687 expand_start_cond (tree cond, int exitflag)
1689 struct nesting *thiscond = ALLOC_NESTING ();
1691 /* Make an entry on cond_stack for the cond we are entering. */
1693 thiscond->desc = COND_NESTING;
1694 thiscond->next = cond_stack;
1695 thiscond->all = nesting_stack;
1696 thiscond->depth = ++nesting_depth;
/* next_label is the target jumped to when COND is false, i.e. the
   start of the else/elseif part or the end of the statement.  */
1697 thiscond->data.cond.next_label = gen_label_rtx ();
1698 /* Before we encounter an `else', we don't need a separate exit label
1699 unless there are supposed to be exit statements
1700 to exit this conditional. */
1701 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1702 thiscond->data.cond.endif_label = thiscond->exit_label;
1703 cond_stack = thiscond;
1704 nesting_stack = thiscond;
/* Emit the test: fall through when COND is true, jump when false.  */
1706 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1709 /* Generate RTL between then-clause and the elseif-clause
1710 of an if-then-elseif-.... */
1713 expand_start_elseif (tree cond)
/* Lazily create the endif label the first time it is needed.  */
1715 if (cond_stack->data.cond.endif_label == 0)
1716 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* End of the then-clause: skip over the elseif part.  */
1717 emit_jump (cond_stack->data.cond.endif_label);
1718 emit_label (cond_stack->data.cond.next_label);
/* Fresh false-target for the new condition.  */
1719 cond_stack->data.cond.next_label = gen_label_rtx ();
1720 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1723 /* Generate RTL between the then-clause and the else-clause
1724 of an if-then-else. */
1727 expand_start_else (void)
/* Lazily create the endif label the first time it is needed.  */
1729 if (cond_stack->data.cond.endif_label == 0)
1730 cond_stack->data.cond.endif_label = gen_label_rtx ();
/* End of the then-clause: jump over the else-clause.  */
1732 emit_jump (cond_stack->data.cond.endif_label);
1733 emit_label (cond_stack->data.cond.next_label);
1734 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1737 /* After calling expand_start_else, turn this "else" into an "else if"
1738 by providing another condition. */
1741 expand_elseif (tree cond)
/* Re-arm next_label (cleared by expand_start_else) and test COND.  */
1743 cond_stack->data.cond.next_label = gen_label_rtx ();
1744 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1747 /* Generate RTL for the end of an if-then.
1748 Pop the record for it off of cond_stack. */
1751 expand_end_cond (void)
1753 struct nesting *thiscond = cond_stack;
1755 do_pending_stack_adjust ();
/* next_label is still set when there was no else-clause; endif_label
   is set when there was an else/elseif or an exit label was needed.  */
1756 if (thiscond->data.cond.next_label)
1757 emit_label (thiscond->data.cond.next_label);
1758 if (thiscond->data.cond.endif_label)
1759 emit_label (thiscond->data.cond.endif_label);
1761 POPSTACK (cond_stack);
1764 /* Return nonzero if we should preserve sub-expressions as separate
1765 pseudos. We never do so if we aren't optimizing. We always do so
1766 if -fexpensive-optimizations. */
1769 preserve_subexpressions_p (void)
1771 if (flag_expensive_optimizations)
/* No function context or statement state means there is nothing to
   preserve across.  */
1774 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1781 /* Generate RTL to return from the current function, with no value.
1782 (That is, we do not do anything about returning any value.) */
1785 expand_null_return (void)
1787 /* If this function was declared to return a value, but we
1788 didn't, clobber the return registers so that they are not
1789 propagated live to the rest of the function. */
1790 clobber_return_register ();
1792 expand_null_return_1 ();
1795 /* Generate RTL to return directly from the current function.
1796 (That is, we bypass any return value.) */
1799 expand_naked_return (void)
1803 clear_pending_stack_adjust ();
1804 do_pending_stack_adjust ();
/* Reuse the shared naked-return label if one exists; otherwise create
   it on first use.  */
1806 end_label = naked_return_label;
1808 end_label = naked_return_label = gen_label_rtx ();
1810 emit_jump (end_label);
1813 /* If the current function returns values in the most significant part
1814 of a register, shift return value VAL appropriately. The mode of
1815 the function's return type is known not to be BLKmode. */
1818 shift_return_value (rtx val)
1822 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1823 if (targetm.calls.return_in_msb (type))
1826 HOST_WIDE_INT shift;
1828 target = DECL_RTL (DECL_RESULT (current_function_decl));
/* Shift amount = unused bits at the low end of the return register.  */
1829 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1830 - BITS_PER_UNIT * int_size_in_bytes (type));
1832 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1833 gen_lowpart (GET_MODE (target), val),
1834 build_int_2 (shift, 0), target, 1);
1840 /* Generate RTL to return from the current function, with value VAL. */
1843 expand_value_return (rtx val)
1845 /* Copy the value to the return location
1846 unless it's already there. */
1848 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1849 if (return_reg != val)
1851 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* If the target promotes return values, widen VAL to the promoted
   mode before copying it into the return register.  */
1852 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1854 int unsignedp = TYPE_UNSIGNED (type);
1855 enum machine_mode old_mode
1856 = DECL_MODE (DECL_RESULT (current_function_decl));
1857 enum machine_mode mode
1858 = promote_mode (type, old_mode, &unsignedp, 1);
1860 if (mode != old_mode)
1861 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location (value split across registers) needs a
   group load rather than a plain move.  */
1863 if (GET_CODE (return_reg) == PARALLEL)
1864 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1866 emit_move_insn (return_reg, val);
1869 expand_null_return_1 ();
1872 /* Output a return with no value. */
1875 expand_null_return_1 (void)
1879 clear_pending_stack_adjust ();
1880 do_pending_stack_adjust ();
/* Jump to the function's shared return label, creating it on first
   use.  */
1882 end_label = return_label;
1884 end_label = return_label = gen_label_rtx ();
1885 emit_jump (end_label);
1888 /* Generate RTL to evaluate the expression RETVAL and return it
1889 from the current function. */
1892 expand_return (tree retval)
1898 /* If function wants no value, give it none. */
1899 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects.  */
1901 expand_expr (retval, NULL_RTX, VOIDmode, 0);
1902 expand_null_return ();
1906 if (retval == error_mark_node)
1908 /* Treat this like a return of no value from a function that
1910 expand_null_return ();
/* Pick out the right-hand side whose value is actually returned:
   either RETVAL itself or the RHS of `RESULT_DECL = rhs'.  */
1913 else if (TREE_CODE (retval) == RESULT_DECL)
1914 retval_rhs = retval;
1915 else if ((TREE_CODE (retval) == MODIFY_EXPR
1916 || TREE_CODE (retval) == INIT_EXPR)
1917 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1918 retval_rhs = TREE_OPERAND (retval, 1);
1920 retval_rhs = retval;
1922 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1924 /* If the result is an aggregate that is being returned in one (or more)
1925 registers, load the registers here. The compiler currently can't handle
1926 copying a BLKmode value into registers. We could put this code in a
1927 more general area (for use by everyone instead of just function
1928 call/return), but until this feature is generally usable it is kept here
1929 (and in expand_call). */
1932 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1933 && REG_P (result_rtl))
1936 unsigned HOST_WIDE_INT bitpos, xbitpos;
1937 unsigned HOST_WIDE_INT padding_correction = 0;
1938 unsigned HOST_WIDE_INT bytes
1939 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1940 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1941 unsigned int bitsize
1942 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1943 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1944 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1945 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
1946 enum machine_mode tmpmode, result_reg_mode;
1950 expand_null_return ();
1954 /* If the structure doesn't take up a whole number of words, see
1955 whether the register value should be padded on the left or on
1956 the right. Set PADDING_CORRECTION to the number of padding
1957 bits needed on the left side.
1959 In most ABIs, the structure will be returned at the least end of
1960 the register, which translates to right padding on little-endian
1961 targets and left padding on big-endian targets. The opposite
1962 holds if the structure is returned at the most significant
1963 end of the register. */
1964 if (bytes % UNITS_PER_WORD != 0
1965 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1967 : BYTES_BIG_ENDIAN))
1968 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1971 /* Copy the structure BITSIZE bits at a time. */
1972 for (bitpos = 0, xbitpos = padding_correction;
1973 bitpos < bytes * BITS_PER_UNIT;
1974 bitpos += bitsize, xbitpos += bitsize)
1976 /* We need a new destination pseudo each time xbitpos is
1977 on a word boundary and when xbitpos == padding_correction
1978 (the first time through). */
1979 if (xbitpos % BITS_PER_WORD == 0
1980 || xbitpos == padding_correction)
1982 /* Generate an appropriate register. */
1983 dst = gen_reg_rtx (word_mode);
1984 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1986 /* Clear the destination before we move anything into it. */
1987 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1990 /* We need a new source operand each time bitpos is on a word
1992 if (bitpos % BITS_PER_WORD == 0)
1993 src = operand_subword_force (result_val,
1994 bitpos / BITS_PER_WORD,
1997 /* Use bitpos for the source extraction (left justified) and
1998 xbitpos for the destination store (right justified). */
1999 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2000 extract_bit_field (src, bitsize,
2001 bitpos % BITS_PER_WORD, 1,
2002 NULL_RTX, word_mode, word_mode));
2005 tmpmode = GET_MODE (result_rtl);
2006 if (tmpmode == BLKmode)
2008 /* Find the smallest integer mode large enough to hold the
2009 entire structure and use that mode instead of BLKmode
2010 on the USE insn for the return register. */
2011 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2012 tmpmode != VOIDmode;
2013 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2014 /* Have we found a large enough mode? */
2015 if (GET_MODE_SIZE (tmpmode) >= bytes)
2018 /* No suitable mode found. */
2019 if (tmpmode == VOIDmode)
2022 PUT_MODE (result_rtl, tmpmode);
/* Assemble the word-sized pseudos into one register wide enough for
   operand_subword to index, then narrow to TMPMODE if needed.  */
2025 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2026 result_reg_mode = word_mode;
2028 result_reg_mode = tmpmode;
2029 result_reg = gen_reg_rtx (result_reg_mode);
2031 for (i = 0; i < n_regs; i++)
2032 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2035 if (tmpmode != result_reg_mode)
2036 result_reg = gen_lowpart (tmpmode, result_reg);
2038 expand_value_return (result_reg);
2040 else if (retval_rhs != 0
2041 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2042 && (REG_P (result_rtl)
2043 || (GET_CODE (result_rtl) == PARALLEL)))
2045 /* Calculate the return value into a temporary (usually a pseudo
2047 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2048 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2050 val = assign_temp (nt, 0, 0, 1);
2051 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2052 val = force_not_mem (val);
2053 /* Return the calculated value. */
2054 expand_value_return (shift_return_value (val));
2058 /* No hard reg used; calculate value into hard return reg. */
2059 expand_expr (retval, const0_rtx, VOIDmode, 0);
2060 expand_value_return (result_rtl);
2064 /* Generate the RTL code for entering a binding contour.
2065 The variables are declared one by one, by calls to `expand_decl'.
2067 FLAGS is a bitwise or of the following flags:
2069 1 - Nonzero if this construct should be visible to
2072 2 - Nonzero if this contour does not require a
2073 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2074 language-independent code should set this flag because they
2075 will not create corresponding BLOCK nodes. (There should be
2076 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2077 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2078 when expand_end_bindings is called.
2080 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2081 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2085 expand_start_bindings_and_block (int flags, tree block)
2087 struct nesting *thisblock = ALLOC_NESTING ();
2089 int exit_flag = ((flags & 1) != 0);
2090 int block_flag = ((flags & 2) == 0);
2092 /* If a BLOCK is supplied, then the caller should be requesting a
2093 NOTE_INSN_BLOCK_BEG note. */
2094 if (!block_flag && block)
2097 /* Create a note to mark the beginning of the block. */
2098 note = emit_note (NOTE_INSN_DELETED);
2100 /* Make an entry on block_stack for the block we are entering. */
2102 thisblock->desc = BLOCK_NESTING;
2103 thisblock->next = block_stack;
2104 thisblock->all = nesting_stack;
2105 thisblock->depth = ++nesting_depth;
2106 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2108 /* When we insert instructions after the last unconditional cleanup,
2109 we don't adjust last_insn. That means that a later add_insn will
2110 clobber the instructions we've just added. The easiest way to
2111 fix this is to just insert another instruction here, so that the
2112 instructions inserted after the last unconditional cleanup are
2113 never the last instruction. */
2114 emit_note (NOTE_INSN_DELETED);
2116 thisblock->data.block.first_insn = note;
2117 thisblock->data.block.block_start_count = ++current_block_start_count;
2118 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
/* Push onto both the block stack and the unified nesting stack.  */
2119 block_stack = thisblock;
2120 nesting_stack = thisblock;
2122 /* Make a new level for allocating stack slots. */
2126 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2127 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2128 expand_expr are made. After we end the region, we know that all
2129 space for all temporaries that were created by TARGET_EXPRs will be
2130 destroyed and their space freed for reuse. */
2133 expand_start_target_temps (void)
2135 /* This is so that even if the result is preserved, the space
2136 allocated will be freed, as we know that it is no longer in use. */
2139 /* Start a new binding layer that will keep track of all cleanup
2140 actions to be performed. */
/* Flag 2: no NOTE_INSN_BLOCK_BEG, since no BLOCK node corresponds.  */
2141 expand_start_bindings (2);
2143 target_temp_slot_level = temp_slot_level;
/* End a temporary-scope region opened by expand_start_target_temps.  */
2147 expand_end_target_temps (void)
2149 expand_end_bindings (NULL_TREE, 0, 0);
2151 /* This is so that even if the result is preserved, the space
2152 allocated will be freed, as we know that it is no longer in use. */
2156 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2157 in question represents the outermost pair of curly braces (i.e. the "body
2158 block") of a function or method.
2160 For any BLOCK node representing a "body block" of a function or method, the
2161 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2162 represents the outermost (function) scope for the function or method (i.e.
2163 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2164 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2167 is_body_block (tree stmt)
/* Some front ends have no notion of body blocks at all.  */
2169 if (lang_hooks.no_body_blocks)
2172 if (TREE_CODE (stmt) == BLOCK)
2174 tree parent = BLOCK_SUPERCONTEXT (stmt);
2176 if (parent && TREE_CODE (parent) == BLOCK)
2178 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2180 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2188 /* Return an opaque pointer to the current nesting level, so frontend code
2189 can check its own sanity. */
2192 current_nesting_level (void)
/* No current function means no nesting state at all.  */
2194 return cfun ? block_stack : 0;
2197 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2200 expand_nl_goto_receiver (void)
2202 /* Clobber the FP when we get here, so we have to make sure it's
2203 marked as used by this function. */
2204 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2206 /* Mark the static chain as clobbered here so life information
2207 doesn't get messed up for it. */
2208 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2210 #ifdef HAVE_nonlocal_goto
2211 if (! HAVE_nonlocal_goto)
2213 /* First adjust our frame pointer to its actual value. It was
2214 previously set to the start of the virtual area corresponding to
2215 the stacked variables when we branched here and now needs to be
2216 adjusted to the actual hardware fp value.
2218 Assignments are to virtual registers are converted by
2219 instantiate_virtual_regs into the corresponding assignment
2220 to the underlying register (fp in this case) that makes
2221 the original assignment true.
2222 So the following insn will actually be
2223 decrementing fp by STARTING_FRAME_OFFSET. */
2224 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2226 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2227 if (fixed_regs[ARG_POINTER_REGNUM])
2229 #ifdef ELIMINABLE_REGS
2230 /* If the argument pointer can be eliminated in favor of the
2231 frame pointer, we don't need to restore it. We assume here
2232 that if such an elimination is present, it can always be used.
2233 This is the case on all known machines; if we don't make this
2234 assumption, we do unnecessary saving on many machines. */
2235 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2238 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2239 if (elim_regs[i].from == ARG_POINTER_REGNUM
2240 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* i == ARRAY_SIZE means no arg-pointer elimination was found, so
   the arg pointer must be restored explicitly.  */
2243 if (i == ARRAY_SIZE (elim_regs))
2246 /* Now restore our arg pointer from the address at which it
2247 was saved in our stack frame. */
2248 emit_move_insn (virtual_incoming_args_rtx,
2249 copy_to_reg (get_arg_pointer_save_area (cfun)));
2254 #ifdef HAVE_nonlocal_goto_receiver
2255 if (HAVE_nonlocal_goto_receiver)
2256 emit_insn (gen_nonlocal_goto_receiver ());
2259 /* @@@ This is a kludge. Not all machine descriptions define a blockage
2260 insn, but we must not allow the code we just generated to be reordered
2261 by scheduling. Specifically, the update of the frame pointer must
2262 happen immediately, not later. So emit an ASM_INPUT to act as blockage
2264 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2267 /* Warn about any unused VARS (which may contain nodes other than
2268 VAR_DECLs, but such nodes are ignored). The nodes are connected
2269 via the TREE_CHAIN field. */
2272 warn_about_unused_variables (tree vars)
2276 if (warn_unused_variable)
2277 for (decl = vars; decl; decl = TREE_CHAIN (decl))
/* Skip unnamed, compiler-generated, and system-header decls; they
   are not the user's fault.  */
2278 if (TREE_CODE (decl) == VAR_DECL
2279 && ! TREE_USED (decl)
2280 && ! DECL_IN_SYSTEM_HEADER (decl)
2281 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2282 warning ("%Junused variable '%D'", decl, decl);
2285 /* Generate RTL code to terminate a binding contour.
2287 VARS is the chain of VAR_DECL nodes for the variables bound in this
2288 contour. There may actually be other nodes in this chain, but any
2289 nodes other than VAR_DECLS are ignored.
2291 MARK_ENDS is nonzero if we should put a note at the beginning
2292 and end of this binding contour.
2294 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2295 zero if we can jump into this contour only if it does not have a saved
2296 stack level, and negative if we are not to check for invalid use of
2297 labels (because the front end does that). */
2300 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2301 int dont_jump_in ATTRIBUTE_UNUSED)
2303 struct nesting *thisblock = block_stack;
2305 /* If any of the variables in this scope were not used, warn the
2307 warn_about_unused_variables (vars);
/* An exit label exists only when the contour was created with the
   exit flag (visible to `exit_something').  */
2309 if (thisblock->exit_label)
2311 do_pending_stack_adjust ();
2312 emit_label (thisblock->exit_label);
2315 /* Mark the beginning and end of the scope if requested. */
2317 /* Get rid of the beginning-mark if we don't make an end-mark. */
2318 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2320 /* Restore the temporary level of TARGET_EXPRs. */
2321 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2323 /* Restore block_stack level for containing block. */
2325 POPSTACK (block_stack);
2327 /* Pop the stack slot nesting and free any slots at this level. */
2331 /* Generate RTL for the automatic variable declaration DECL.
2332 (Other kinds of declarations are simply ignored if seen here.) */
2335 expand_decl (tree decl)
2339 type = TREE_TYPE (decl);
2341 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2342 type in case this node is used in a reference. */
2343 if (TREE_CODE (decl) == CONST_DECL)
2345 DECL_MODE (decl) = TYPE_MODE (type);
2346 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2347 DECL_SIZE (decl) = TYPE_SIZE (type);
2348 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2352 /* Otherwise, only automatic variables need any expansion done. Static and
2353 external variables, and external functions, will be handled by
2354 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2355 nothing. PARM_DECLs are handled in `assign_parms'. */
2356 if (TREE_CODE (decl) != VAR_DECL)
2359 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2362 /* Create the RTL representation for the variable. */
2364 if (type == error_mark_node)
2365 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
2367 else if (DECL_SIZE (decl) == 0)
2368 /* Variable with incomplete type. */
2371 if (DECL_INITIAL (decl) == 0)
2372 /* Error message was already done; now avoid a crash. */
2373 x = gen_rtx_MEM (BLKmode, const0_rtx);
2375 /* An initializer is going to decide the size of this array.
2376 Until we know the size, represent its address with a reg. */
2377 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
2379 set_mem_attributes (x, decl, 1);
2380 SET_DECL_RTL (decl, x);
2382 else if (use_register_for_decl (decl))
2384 /* Automatic variable that can go in a register. */
2385 int unsignedp = TYPE_UNSIGNED (type);
2386 enum machine_mode reg_mode
2387 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
2389 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
2391 /* Note if the object is a user variable. */
2392 if (!DECL_ARTIFICIAL (decl))
2394 mark_user_reg (DECL_RTL (decl));
2396 /* Trust user variables which have a pointer type to really
2397 be pointers. Do not trust compiler generated temporaries
2398 as our type system is totally busted as it relates to
2399 pointer arithmetic which translates into lots of compiler
2400 generated objects with pointer types, but which are not really
2402 if (POINTER_TYPE_P (type))
2403 mark_reg_pointer (DECL_RTL (decl),
2404 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2407 maybe_set_unchanging (DECL_RTL (decl), decl);
2410 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
2411 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
2412 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2413 STACK_CHECK_MAX_VAR_SIZE)))
2415 /* Variable of fixed size that goes on the stack. */
2420 /* If we previously made RTL for this decl, it must be an array
2421 whose size was determined by the initializer.
2422 The old address was a register; set that register now
2423 to the proper address. */
2424 if (DECL_RTL_SET_P (decl))
2426 if (!MEM_P (DECL_RTL (decl))
2427 || !REG_P (XEXP (DECL_RTL (decl), 0)))
2429 oldaddr = XEXP (DECL_RTL (decl), 0);
2432 /* Set alignment we actually gave this decl. */
2433 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2434 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2435 DECL_USER_ALIGN (decl) = 0;
2437 x = assign_temp (decl, 1, 1, 1);
2438 set_mem_attributes (x, decl, 1);
2439 SET_DECL_RTL (decl, x);
2443 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2444 if (addr != oldaddr)
2445 emit_move_insn (oldaddr, addr);
2449 /* Dynamic-size object: must push space on the stack. */
2451 rtx address, size, x;
2453 /* Record the stack pointer on entry to block, if have
2454 not already done so. */
2455 do_pending_stack_adjust ();
2457 /* Compute the variable's size, in bytes. This will expand any
2458 needed SAVE_EXPRs for the first time. */
2459 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2462 /* Allocate space on the stack for the variable. Note that
2463 DECL_ALIGN says how the variable is to be aligned and we
2464 cannot use it to conclude anything about the alignment of
2466 address = allocate_dynamic_stack_space (size, NULL_RTX,
2467 TYPE_ALIGN (TREE_TYPE (decl)));
2469 /* Reference the variable indirect through that rtx. */
2470 x = gen_rtx_MEM (DECL_MODE (decl), address);
2471 set_mem_attributes (x, decl, 1);
2472 SET_DECL_RTL (decl, x);
2475 /* Indicate the alignment we actually gave this variable. */
2476 #ifdef STACK_BOUNDARY
2477 DECL_ALIGN (decl) = STACK_BOUNDARY;
2479 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2481 DECL_USER_ALIGN (decl) = 0;
2485 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
/* ALLOC must be an ADDR_EXPR of a VAR_DECL; the allocated block's
   address is stored back through that variable's DECL_RTL address.
   NOTE(review): guard-failure paths (lines after the TREE_CODE checks)
   are elided in this listing — presumably aborts; confirm in full source.  */
2487 expand_stack_alloc (tree alloc, tree t_size)
2489 rtx address, dest, size;
2492 if (TREE_CODE (alloc) != ADDR_EXPR)
2494 var = TREE_OPERAND (alloc, 0);
2495 if (TREE_CODE (var) != VAR_DECL)
2498 type = TREE_TYPE (var);
2500 /* Compute the variable's size, in bytes. */
2501 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2504 /* Allocate space on the stack for the variable. */
2505 address = XEXP (DECL_RTL (var), 0);
2506 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
/* allocate_dynamic_stack_space may not use the target we passed;
   copy the result back if so.  */
2507 if (dest != address)
2508 emit_move_insn (address, dest);
2510 /* Indicate the alignment we actually gave this variable. */
2511 #ifdef STACK_BOUNDARY
2512 DECL_ALIGN (var) = STACK_BOUNDARY;
2514 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2516 DECL_USER_ALIGN (var) = 0;
2519 /* Emit code to save the current value of stack. */
/* Flushes any pending stack adjustment first so the saved pointer is
   accurate; the save target RET is on an elided declaration line and
   is presumably returned to the caller — confirm in full source.  */
2521 expand_stack_save (void)
2525 do_pending_stack_adjust ();
2526 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2530 /* Emit code to restore the current value of stack. */
/* VAR holds a previously saved stack pointer (its DECL_RTL); emit the
   matching SAVE_BLOCK-level restore.  */
2532 expand_stack_restore (tree var)
2534 rtx sa = DECL_RTL (var);
2536 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2539 /* Emit code to perform the initialization of a declaration DECL. */
/* Stores DECL's initial value: zero-fills scalar decls whose initializer
   was erroneous, otherwise expands DECL_INITIAL as an assignment.
   The TREE_USED flag is preserved across the store so initialization
   alone does not suppress an unused-variable warning.  */
2542 expand_decl_init (tree decl)
2544 int was_used = TREE_USED (decl);
2546 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2547 for static decls. */
2548 if (TREE_CODE (decl) == CONST_DECL
2549 || TREE_STATIC (decl))
2552 /* Compute and store the initial value now. */
2556 if (DECL_INITIAL (decl) == error_mark_node)
2558 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2560 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2561 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2562 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2565 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2567 emit_line_note (DECL_SOURCE_LOCATION (decl));
2568 expand_assignment (decl, DECL_INITIAL (decl), 0);
2571 /* Don't let the initialization count as "using" the variable. */
2572 TREE_USED (decl) = was_used;
2574 /* Free any temporaries we made while initializing the decl. */
2575 preserve_temp_slots (NULL_RTX);
2581 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2582 DECL_ELTS is the list of elements that belong to DECL's type.
2583 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
/* Gives every member VAR_DECL of the anonymous union an RTL location
   aliasing the union's own DECL_RTL, adjusting the mode per element.
   NOTE(review): the DECL_RTL-expansion step between lines 2596 and 2601
   is elided in this listing.  */
2586 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2592 /* If any of the elements are addressable, so is the entire union. */
2593 for (t = decl_elts; t; t = TREE_CHAIN (t))
2594 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2596 TREE_ADDRESSABLE (decl) = 1;
2601 x = DECL_RTL (decl);
2603 /* Go through the elements, assigning RTL to each. */
2604 for (t = decl_elts; t; t = TREE_CHAIN (t))
2606 tree decl_elt = TREE_VALUE (t);
2607 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2609 /* If any of the elements are addressable, so is the entire
2611 if (TREE_USED (decl_elt))
2612 TREE_USED (decl) = 1;
2614 /* Propagate the union's alignment to the elements. */
2615 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2616 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2618 /* If the element has BLKmode and the union doesn't, the union is
2619 aligned such that the element doesn't need to have BLKmode, so
2620 change the element's mode to the appropriate one for its size. */
2621 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2622 DECL_MODE (decl_elt) = mode
2623 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2625 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2626 instead create a new MEM rtx with the proper mode. */
/* The two if/else pairs below are the MEM branch vs. the REG branch
   (the branching test itself is on an elided line).  */
2629 if (mode == GET_MODE (x))
2630 SET_DECL_RTL (decl_elt, x);
2632 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
2636 if (mode == GET_MODE (x))
2637 SET_DECL_RTL (decl_elt, x);
2639 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
2646 /* Enter a case (Pascal) or switch (C) statement.
2647 Push a block onto case_stack and nesting_stack
2648 to accumulate the case-labels that are seen
2649 and to record the labels generated for the statement.
2651 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2652 Otherwise, this construct is transparent for `exit_something'.
2654 EXPR is the index-expression to be dispatched on.
2655 TYPE is its nominal type. We could simply convert EXPR to this type,
2656 but instead we take short cuts. */
/* NOTE(review): the comment above mentions EXIT_FLAG/EXPR/TYPE but the
   visible signature takes only INDEX_EXPR — the comment appears stale
   relative to this revision.  */
2659 expand_start_case (tree index_expr)
2661 struct nesting *thiscase = ALLOC_NESTING ();
2663 /* Make an entry on case_stack for the case we are entering. */
2665 thiscase->desc = CASE_NESTING;
2666 thiscase->next = case_stack;
2667 thiscase->all = nesting_stack;
2668 thiscase->depth = ++nesting_depth;
2669 thiscase->exit_label = 0;
2670 thiscase->data.case_stmt.case_list = 0;
2671 thiscase->data.case_stmt.index_expr = index_expr;
2672 thiscase->data.case_stmt.default_label = 0;
2673 case_stack = thiscase;
2674 nesting_stack = thiscase;
2676 do_pending_stack_adjust ();
2678 /* Make sure case_stmt.start points to something that won't
2679 need any transformation before expand_end_case. */
2680 if (!NOTE_P (get_last_insn ()))
2681 emit_note (NOTE_INSN_DELETED);
2683 thiscase->data.case_stmt.start = get_last_insn ();
2686 /* Do the insertion of a case label into
2687 case_stack->data.case_stmt.case_list. The labels are fed to us
2688 in descending order from the sorted vector of case labels used
2689 in the tree part of the middle end. So the list we construct is
2690 sorted in ascending order. */
/* LOW/HIGH are the case bounds (HIGH null or equal to LOW for a
   single-value case); LABEL is the LABEL_DECL to branch to.  A null
   LOW presumably marks the default label — the test is on an elided
   line; confirm in full source.  */
2693 add_case_node (tree low, tree high, tree label)
2695 struct case_node *r;
2697 /* If there's no HIGH value, then this is not a case range; it's
2698 just a simple case label. But that's just a degenerate case
2700 If the bounds are equal, turn this into the one-value case. */
2701 if (!high || tree_int_cst_equal (low, high))
2704 /* Handle default labels specially. */
2707 #ifdef ENABLE_CHECKING
/* With checking enabled, a second default label is a hard error.  */
2708 if (case_stack->data.case_stmt.default_label != 0)
2711 case_stack->data.case_stmt.default_label = label;
2715 /* Add this label to the chain. */
2716 r = ggc_alloc (sizeof (struct case_node));
2719 r->code_label = label;
2720 r->parent = r->left = NULL;
/* Push onto the head of the list via the RIGHT links; since input
   arrives in descending order, the list ends up ascending.  */
2721 r->right = case_stack->data.case_stmt.case_list;
2722 case_stack->data.case_stmt.case_list = r;
2725 /* Maximum number of case bit tests. */
2726 #define MAX_CASE_BIT_TESTS 3
2728 /* By default, enable case bit tests on targets with ashlsi3. */
2729 #ifndef CASE_USE_BIT_TESTS
/* True when the target has a shift-left pattern in word_mode, making
   the "1 << x & mask" switch lowering viable.  */
2730 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
2731 != CODE_FOR_nothing)
2735 /* A case_bit_test represents a set of case nodes that may be
2736 selected from using a bit-wise comparison. HI and LO hold
2737 the integer to be tested against, LABEL contains the label
2738 to jump to upon success and BITS counts the number of case
2739 nodes handled by this test, typically the number of bits
2742 struct case_bit_test
/* NOTE(review): the member declarations (hi, lo, label, bits per the
   comment above) are on elided lines of this listing.  */
2750 /* Determine whether "1 << x" is relatively cheap in word_mode. */
/* Result is computed once (guarded by the static INIT flag, whose test
   line is elided here) using a dummy pseudo register so rtx_cost can
   price a variable shift; "cheap" means under three insns.  */
2753 bool lshift_cheap_p (void)
2755 static bool init = false;
2756 static bool cheap = true;
2760 rtx reg = gen_rtx_REG (word_mode, 10000);
2761 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2762 cheap = cost < COSTS_N_INSNS (3);
2769 /* Comparison function for qsort to order bit tests by decreasing
2770 number of case nodes, i.e. the node with the most cases gets
/* Standard qsort comparator: returns negative/zero/positive so that
   higher BITS counts sort first.  */
2774 case_bit_test_cmp (const void *p1, const void *p2)
2776 const struct case_bit_test *d1 = p1;
2777 const struct case_bit_test *d2 = p2;
2779 return d2->bits - d1->bits;
2782 /* Expand a switch statement by a short sequence of bit-wise
2783 comparisons. "switch(x)" is effectively converted into
2784 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2787 INDEX_EXPR is the value being switched on, which is of
2788 type INDEX_TYPE. MINVAL is the lowest case value of in
2789 the case nodes, of INDEX_TYPE type, and RANGE is highest
2790 value minus MINVAL, also of type INDEX_TYPE. NODES is
2791 the set of case nodes, and DEFAULT_LABEL is the label to
2792 branch to should none of the cases match.
2794 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2798 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2799 tree range, case_node_ptr nodes, rtx default_label)
2801 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2802 enum machine_mode mode;
2803 rtx expr, index, label;
2804 unsigned int i,j,lo,hi;
2805 struct case_node *n;
/* Phase 1: group the case nodes by target label, accumulating one
   case_bit_test (bit mask + label) per unique target.  */
2809 for (n = nodes; n; n = n->right)
2811 label = label_rtx (n->code_label);
2812 for (i = 0; i < count; i++)
2813 if (same_case_target_p (label, test[i].label))
/* Caller guarantees at most MAX_CASE_BIT_TESTS unique targets; the
   abort for a violation is on an elided line.  */
2818 if (count >= MAX_CASE_BIT_TESTS)
2822 test[i].label = label;
/* Set the mask bits for every value in [low, high], rebased to MINVAL.  */
2829 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
2830 n->low, minval)), 1);
2831 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
2832 n->high, minval)), 1);
2833 for (j = lo; j <= hi; j++)
2834 if (j >= HOST_BITS_PER_WIDE_INT)
/* NOTE(review): the shift amount uses HOST_BITS_PER_INT here while the
   split condition uses HOST_BITS_PER_WIDE_INT — looks suspicious;
   verify against the full source before relying on the HI mask.  */
2835 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2837 test[i].lo |= (HOST_WIDE_INT) 1 << j;
/* Phase 2: try the most-populated masks first.  */
2840 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2842 index_expr = fold (build (MINUS_EXPR, index_type,
2843 convert (index_type, index_expr),
2844 convert (index_type, minval)));
2845 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2846 do_pending_stack_adjust ();
/* Out-of-range values (unsigned compare against RANGE) go straight to
   the default label.  */
2848 mode = TYPE_MODE (index_type);
2849 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
2850 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2853 index = convert_to_mode (word_mode, index, 0);
2854 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2855 index, NULL_RTX, 1, OPTAB_WIDEN);
/* Phase 3: one AND + conditional jump per unique target.  */
2857 for (i = 0; i < count; i++)
2859 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2860 expr = expand_binop (word_mode, and_optab, index, expr,
2861 NULL_RTX, 1, OPTAB_WIDEN);
2862 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2863 word_mode, 1, test[i].label);
2866 emit_jump (default_label);
/* Fallback definitions for targets whose .md files provide neither a
   casesi nor a tablejump pattern; the dispatch code below tests these.  */
2870 #define HAVE_casesi 0
2873 #ifndef HAVE_tablejump
2874 #define HAVE_tablejump 0
2877 /* Terminate a case (Pascal) or switch (C) statement
2878 in which ORIG_INDEX is the expression to be tested.
2879 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2880 type as given in the source before any compiler conversions.
2881 Generate the code to test it and jump to the right place. */
/* Strategy selection, in order: constant/empty switch -> direct jump;
   dense small switch -> bit tests; sparse or tiny switch -> balanced
   binary decision tree; otherwise -> casesi / tablejump / explicit
   ADDR_VEC dispatch table.
   NOTE(review): this listing is line-sampled; braces, else keywords and
   some statements are elided.  */
2884 expand_end_case_type (tree orig_index, tree orig_type)
2886 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2887 rtx default_label = 0;
2888 struct case_node *n, *m;
2889 unsigned int count, uniq;
2895 rtx before_case, end, lab;
2896 struct nesting *thiscase = case_stack;
2897 tree index_expr, index_type;
2898 bool exit_done = false;
2901 /* Don't crash due to previous errors. */
2902 if (thiscase == NULL)
2905 index_expr = thiscase->data.case_stmt.index_expr;
2906 index_type = TREE_TYPE (index_expr);
2907 unsignedp = TYPE_UNSIGNED (index_type);
2908 if (orig_type == NULL)
2909 orig_type = TREE_TYPE (orig_index);
2911 do_pending_stack_adjust ();
2913 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2914 if (index_type != error_mark_node)
2916 /* If we don't have a default-label, create one here,
2917 after the body of the switch. */
2918 if (thiscase->data.case_stmt.default_label == 0)
2920 thiscase->data.case_stmt.default_label
2921 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE)
2922 /* Share the exit label if possible. */
2923 if (thiscase->exit_label)
2925 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
2926 thiscase->exit_label);
2929 expand_label (thiscase->data.case_stmt.default_label);
2931 default_label = label_rtx (thiscase->data.case_stmt.default_label);
2933 before_case = get_last_insn ();
2935 /* Get upper and lower bounds of case values.
2936 Also convert all the case values to the index expr's data type. */
/* This scan also computes COUNT (ranges count double) and UNIQ, the
   number of distinct target labels, used by the heuristics below.  */
2940 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
2942 /* Check low and high label values are integers. */
2943 if (TREE_CODE (n->low) != INTEGER_CST)
2945 if (TREE_CODE (n->high) != INTEGER_CST)
2948 n->low = convert (index_type, n->low);
2949 n->high = convert (index_type, n->high);
2951 /* Count the elements and track the largest and smallest
2952 of them (treating them as signed even if they are not). */
2960 if (INT_CST_LT (n->low, minval))
2962 if (INT_CST_LT (maxval, n->high))
2965 /* A range counts double, since it requires two compares. */
2966 if (! tree_int_cst_equal (n->low, n->high))
2969 /* Count the number of unique case node targets. */
2971 lab = label_rtx (n->code_label);
2972 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
2973 if (same_case_target_p (label_rtx (m->code_label), lab))
2980 /* Compute span of values. */
2982 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
/* Empty case list: still evaluate the index for side effects, then
   jump to the default.  */
2986 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
2987 emit_jump (default_label);
2990 /* Try implementing this switch statement by a short sequence of
2991 bit-wise comparisons. However, we let the binary-tree case
2992 below handle constant index expressions. */
2993 else if (CASE_USE_BIT_TESTS
2994 && ! TREE_CONSTANT (index_expr)
2995 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2996 && compare_tree_int (range, 0) > 0
2997 && lshift_cheap_p ()
2998 && ((uniq == 1 && count >= 3)
2999 || (uniq == 2 && count >= 5)
3000 || (uniq == 3 && count >= 6)))
3002 /* Optimize the case where all the case values fit in a
3003 word without having to subtract MINVAL. In this case,
3004 we can optimize away the subtraction. */
3005 if (compare_tree_int (minval, 0) > 0
3006 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3008 minval = integer_zero_node;
3011 emit_case_bit_tests (index_type, index_expr, minval, range,
3012 thiscase->data.case_stmt.case_list,
3016 /* If range of values is much bigger than number of values,
3017 make a sequence of conditional branches instead of a dispatch.
3018 If the switch-index is a constant, do it this way
3019 because we can optimize it. */
3021 else if (count < case_values_threshold ()
3022 || compare_tree_int (range,
3023 (optimize_size ? 3 : 10) * count) > 0
3024 /* RANGE may be signed, and really large ranges will show up
3025 as negative numbers. */
3026 || compare_tree_int (range, 0) < 0
3027 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3030 || TREE_CONSTANT (index_expr)
3031 /* If neither casesi or tablejump is available, we can
3032 only go this way. */
3033 || (!HAVE_casesi && !HAVE_tablejump))
3035 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3037 /* If the index is a short or char that we do not have
3038 an insn to handle comparisons directly, convert it to
3039 a full integer now, rather than letting each comparison
3040 generate the conversion. */
3042 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3043 && ! have_insn_for (COMPARE, GET_MODE (index)))
3045 enum machine_mode wider_mode;
3046 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3047 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3048 if (have_insn_for (COMPARE, wider_mode))
3050 index = convert_to_mode (wider_mode, index, unsignedp);
3055 do_pending_stack_adjust ();
3058 index = copy_to_reg (index);
3059 if (GET_CODE (index) == CONST_INT
3060 || TREE_CODE (index_expr) == INTEGER_CST)
3062 /* Make a tree node with the proper constant value
3063 if we don't already have one. */
3064 if (TREE_CODE (index_expr) != INTEGER_CST)
3067 = build_int_2 (INTVAL (index),
3068 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3069 index_expr = convert (index_type, index_expr);
3072 /* For constant index expressions we need only
3073 issue an unconditional branch to the appropriate
3074 target code. The job of removing any unreachable
3075 code is left to the optimization phase if the
3076 "-O" option is specified. */
3077 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3078 if (! tree_int_cst_lt (index_expr, n->low)
3079 && ! tree_int_cst_lt (n->high, index_expr))
3083 emit_jump (label_rtx (n->code_label));
3085 emit_jump (default_label);
3089 /* If the index expression is not constant we generate
3090 a binary decision tree to select the appropriate
3091 target code. This is done as follows:
3093 The list of cases is rearranged into a binary tree,
3094 nearly optimal assuming equal probability for each case.
3096 The tree is transformed into RTL, eliminating
3097 redundant test conditions at the same time.
3099 If program flow could reach the end of the
3100 decision tree an unconditional jump to the
3101 default code is emitted. */
3104 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3105 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3106 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3107 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3108 default_label, index_type);
3109 emit_jump (default_label);
/* Table-driven dispatch: first try the target's casesi pattern, then
   tablejump; fall back to emitting the jump table ourselves.  */
3114 table_label = gen_label_rtx ();
3115 if (! try_casesi (index_type, index_expr, minval, range,
3116 table_label, default_label))
3118 index_type = integer_type_node;
3120 /* Index jumptables from zero for suitable values of
3121 minval to avoid a subtraction. */
3123 && compare_tree_int (minval, 0) > 0
3124 && compare_tree_int (minval, 3) < 0)
3126 minval = integer_zero_node;
3130 if (! try_tablejump (index_type, index_expr, minval, range,
3131 table_label, default_label))
3135 /* Get table of labels to jump to, in order of case index. */
3137 ncases = tree_low_cst (range, 0) + 1;
3138 labelvec = alloca (ncases * sizeof (rtx));
3139 memset (labelvec, 0, ncases * sizeof (rtx));
3141 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3143 /* Compute the low and high bounds relative to the minimum
3144 value since that should fit in a HOST_WIDE_INT while the
3145 actual values may not. */
3147 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3148 n->low, minval)), 1);
3149 HOST_WIDE_INT i_high
3150 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3151 n->high, minval)), 1);
3154 for (i = i_low; i <= i_high; i ++)
3156 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3159 /* Fill in the gaps with the default. */
3160 for (i = 0; i < ncases; i++)
3161 if (labelvec[i] == 0)
3162 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
3164 /* Output the table. */
3165 emit_label (table_label);
3167 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
3168 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
3169 gen_rtx_LABEL_REF (Pmode, table_label),
3170 gen_rtvec_v (ncases, labelvec),
3171 const0_rtx, const0_rtx));
3173 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
3174 gen_rtvec_v (ncases, labelvec)));
3176 /* If the case insn drops through the table,
3177 after the table we must jump to the default-label.
3178 Otherwise record no drop-through after the table. */
3179 #ifdef CASE_DROPS_THROUGH
3180 emit_jump (default_label);
/* Move all the generated dispatch code up to where the switch
   started, so it precedes the case bodies.  */
3186 before_case = NEXT_INSN (before_case);
3187 end = get_last_insn ();
3188 if (squeeze_notes (&before_case, &end))
3190 reorder_insns (before_case, end,
3191 thiscase->data.case_stmt.start);
3194 if (thiscase->exit_label && !exit_done)
3195 emit_label (thiscase->exit_label);
3197 POPSTACK (case_stack);
3202 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
/* When both operands are CONST_INTs the result is decided at compile
   time (that branch's body is on elided lines); otherwise a runtime
   compare-and-jump is emitted.  UNSIGNEDP selects unsigned compare.  */
3205 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3207 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3213 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3214 (GET_MODE (op1) == VOIDmode
3215 ? GET_MODE (op2) : GET_MODE (op1)),
3219 /* Not all case values are encountered equally. This function
3220 uses a heuristic to weight case labels, in cases where that
3221 looks like a reasonable thing to do.
3223 Right now, all we try to guess is text, and we establish the
3226 chars above space: 16
3235 If we find any cases in the switch that are not either -1 or in the range
3236 of valid ASCII characters, or are control characters other than those
3237 commonly used with "\", don't treat this switch scanning text.
3239 Return 1 if these nodes are suitable for cost estimation, otherwise
/* Lazily builds the global COST_TABLE (indexed from -1 to 127) and
   then verifies every case value of NODE's chain lies in the ASCII
   text range with no negatively-weighted control characters.  */
3243 estimate_case_costs (case_node_ptr node)
3245 tree min_ascii = integer_minus_one_node;
3246 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3250 /* If we haven't already made the cost table, make it now. Note that the
3251 lower bound of the table is -1, not zero. */
3253 if (! cost_table_initialized)
3255 cost_table_initialized = 1;
3257 for (i = 0; i < 128; i++)
3260 COST_TABLE (i) = 16;
3261 else if (ISPUNCT (i))
3263 else if (ISCNTRL (i))
/* -1 marks "unexpected in text" — scanned for below to reject the
   whole heuristic.  */
3264 COST_TABLE (i) = -1;
/* Common whitespace/escape characters get explicit weights.  */
3267 COST_TABLE (' ') = 8;
3268 COST_TABLE ('\t') = 4;
3269 COST_TABLE ('\0') = 4;
3270 COST_TABLE ('\n') = 2;
3271 COST_TABLE ('\f') = 1;
3272 COST_TABLE ('\v') = 1;
3273 COST_TABLE ('\b') = 1;
3276 /* See if all the case expressions look like text. It is text if the
3277 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3278 as signed arithmetic since we don't want to ever access cost_table with a
3279 value less than -1. Also check that none of the constants in a range
3280 are strange control characters. */
3282 for (n = node; n; n = n->right)
3284 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3287 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3288 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3289 if (COST_TABLE (i) < 0)
3293 /* All interesting values are within the range of interesting
3294 ASCII characters. */
3298 /* Determine whether two case labels branch to the same target.
3299 Since we now do tree optimizations, just comparing labels is
/* Body (a simple comparison of L1 and L2, per the comment) is on an
   elided line of this listing.  */
3303 same_case_target_p (rtx l1, rtx l2)
3308 /* Take an ordered list of case nodes
3309 and transform them into a near optimal binary tree,
3310 on the assumption that any target code selection value is as
3311 likely as any other.
3313 The transformation is performed by splitting the ordered
3314 list into two equal sections plus a pivot. The parts are
3315 then attached to the pivot as left and right branches. Each
3316 branch is then transformed recursively. */
/* HEAD points at the (right-linked) list to rebalance in place;
   PARENT is recorded in each node for the bound checks performed
   later by node_has_low_bound/node_has_high_bound.  */
3319 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3332 /* Count the number of entries on branch. Also count the ranges. */
3336 if (!tree_int_cst_equal (np->low, np->high))
/* Ranges are weighted by the cost of both endpoints when the ASCII
   cost table is in use.  */
3340 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3344 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3352 /* Split this list if it is long enough for that to help. */
3357 /* Find the place in the list that bisects the list's total cost,
3358 Here I gets half the total cost. */
3363 /* Skip nodes while their cost does not reach that amount. */
3364 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3365 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3366 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3369 npp = &(*npp)->right;
3374 /* Leave this branch lopsided, but optimize left-hand
3375 side and fill in `parent' fields for right-hand side. */
3377 np->parent = parent;
3378 balance_case_nodes (&np->left, np);
3379 for (; np->right; np = np->right)
3380 np->right->parent = np;
3384 /* If there are just three nodes, split at the middle one. */
3386 npp = &(*npp)->right;
3389 /* Find the place in the list that bisects the list's total cost,
3390 where ranges count as 2.
3391 Here I gets half the total cost. */
3392 i = (i + ranges + 1) / 2;
3395 /* Skip nodes while their cost does not reach that amount. */
3396 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3401 npp = &(*npp)->right;
3406 np->parent = parent;
3409 /* Optimize each of the two split parts. */
3410 balance_case_nodes (&np->left, np);
3411 balance_case_nodes (&np->right, np);
3415 /* Else leave this branch as one level,
3416 but fill in `parent' fields. */
3418 np->parent = parent;
3419 for (; np->right; np = np->right)
3420 np->right->parent = np;
3425 /* Search the parent sections of the case node tree
3426 to see if a test for the lower bound of NODE would be redundant.
3427 INDEX_TYPE is the type of the index expression.
3429 The instructions to generate the case decision tree are
3430 output in the same order as nodes are processed so it is
3431 known that if a parent node checks the range of the current
3432 node minus one that the current node is bounded at its lower
3433 span. Thus the test would be redundant. */
/* Returns nonzero when a low-bound compare may be skipped (return
   statements are on elided lines of this listing).  */
3436 node_has_low_bound (case_node_ptr node, tree index_type)
3439 case_node_ptr pnode;
3441 /* If the lower bound of this node is the lowest value in the index type,
3442 we need not test it. */
3444 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
3447 /* If this node has a left branch, the value at the left must be less
3448 than that at this node, so it cannot be bounded at the bottom and
3449 we need not bother testing any further. */
3454 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
3455 node->low, integer_one_node));
3457 /* If the subtraction above overflowed, we can't verify anything.
3458 Otherwise, look for a parent that tests our value - 1. */
3460 if (! tree_int_cst_lt (low_minus_one, node->low))
3463 for (pnode = node->parent; pnode; pnode = pnode->parent)
3464 if (tree_int_cst_equal (low_minus_one, pnode->high))
3470 /* Search the parent sections of the case node tree
3471 to see if a test for the upper bound of NODE would be redundant.
3472 INDEX_TYPE is the type of the index expression.
3474 The instructions to generate the case decision tree are
3475 output in the same order as nodes are processed so it is
3476 known that if a parent node checks the range of the current
3477 node plus one that the current node is bounded at its upper
3478 span. Thus the test would be redundant. */
/* Mirror of node_has_low_bound for the upper bound (return statements
   are on elided lines of this listing).  */
3481 node_has_high_bound (case_node_ptr node, tree index_type)
3484 case_node_ptr pnode;
3486 /* If there is no upper bound, obviously no test is needed. */
3488 if (TYPE_MAX_VALUE (index_type) == NULL)
3491 /* If the upper bound of this node is the highest value in the type
3492 of the index expression, we need not test against it. */
3494 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
3497 /* If this node has a right branch, the value at the right must be greater
3498 than that at this node, so it cannot be bounded at the top and
3499 we need not bother testing any further. */
3504 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
3505 node->high, integer_one_node));
3507 /* If the addition above overflowed, we can't verify anything.
3508 Otherwise, look for a parent that tests our value + 1. */
3510 if (! tree_int_cst_lt (node->high, high_plus_one))
3513 for (pnode = node->parent; pnode; pnode = pnode->parent)
3514 if (tree_int_cst_equal (high_plus_one, pnode->low))
3520 /* Search the parent sections of the
3521 case node tree to see if both tests for the upper and lower
3522 bounds of NODE would be redundant. */
/* Conjunction of the two single-sided checks above.  */
3525 node_is_bounded (case_node_ptr node, tree index_type)
3527 return (node_has_low_bound (node, index_type)
3528 && node_has_high_bound (node, index_type));
3531 /* Emit step-by-step code to select a case for the value of INDEX.
3532 The thus generated decision tree follows the form of the
3533 case-node binary tree NODE, whose nodes represent test conditions.
3534 INDEX_TYPE is the type of the index of the switch.
3536 Care is taken to prune redundant tests from the decision tree
3537 by detecting any boundary conditions already checked by
3538 emitted rtx. (See node_has_high_bound, node_has_low_bound
3539 and node_is_bounded, above.)
3541 Where the test conditions can be shown to be redundant we emit
3542 an unconditional jump to the target code. As a further
3543 optimization, the subordinates of a tree node are examined to
3544 check for bounded nodes. In this case conditional and/or
3545 unconditional jumps as a result of the boundary check for the
3546 current node are arranged to target the subordinates associated
3547 code for out of bound conditions on the current node.
3549 We can assume that when control reaches the code generated here,
3550 the index value has already been compared with the parents
3551 of this node, and determined to be on the same side of each parent
3552 as this node is. Thus, if this node tests for the value 51,
3553 and a parent tested for 52, we don't need to consider
3554 the possibility of a value greater than 51. If another parent
3555 tests for the value 50, then this node need not test anything. */
/* INDEX is the (already-computed) switch index rtx; DEFAULT_LABEL is the
   jump target for values matching no case.  Recurses on node->left /
   node->right, so emitted-code size is proportional to the tree size. */
3558 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
3561 /* If INDEX has an unsigned type, we must make unsigned branches. */
3562 int unsignedp = TYPE_UNSIGNED (index_type);
3563 enum machine_mode mode = GET_MODE (index);
3564 enum machine_mode imode = TYPE_MODE (index_type);
3566 /* See if our parents have already tested everything for us.
3567 If they have, emit an unconditional jump for this node. */
3568 if (node_is_bounded (node, index_type))
3569 emit_jump (label_rtx (node->code_label));
3571 else if (tree_int_cst_equal (node->low, node->high))
3573 /* Node is single valued. First see if the index expression matches
3574 this node and then check our children, if any. */
/* NODE->low must be converted from the tree's mode (imode) to the
   index rtx's mode before the equality comparison. */
3576 do_jump_if_equal (index,
3577 convert_modes (mode, imode,
3578 expand_expr (node->low, NULL_RTX,
3581 label_rtx (node->code_label), unsignedp);
3583 if (node->right != 0 && node->left != 0)
3585 /* This node has children on both sides.
3586 Dispatch to one side or the other
3587 by comparing the index value with this node's value.
3588 If one subtree is bounded, check that one first,
3589 so we can avoid real branches in the tree. */
3591 if (node_is_bounded (node->right, index_type))
3593 emit_cmp_and_jump_insns (index,
3596 expand_expr (node->high, NULL_RTX,
3599 GT, NULL_RTX, mode, unsignedp,
3600 label_rtx (node->right->code_label));
3601 emit_case_nodes (index, node->left, default_label, index_type);
3604 else if (node_is_bounded (node->left, index_type))
3606 emit_cmp_and_jump_insns (index,
3609 expand_expr (node->high, NULL_RTX,
3612 LT, NULL_RTX, mode, unsignedp,
3613 label_rtx (node->left->code_label));
3614 emit_case_nodes (index, node->right, default_label, index_type);
3617 /* If both children are single-valued cases with no
3618 children, finish up all the work. This way, we can save
3619 one ordered comparison. */
3620 else if (tree_int_cst_equal (node->right->low, node->right->high)
3621 && node->right->left == 0
3622 && node->right->right == 0
3623 && tree_int_cst_equal (node->left->low, node->left->high)
3624 && node->left->left == 0
3625 && node->left->right == 0)
3627 /* Neither node is bounded. First distinguish the two sides;
3628 then emit the code for one side at a time. */
3630 /* See if the value matches what the right hand side
3632 do_jump_if_equal (index,
3633 convert_modes (mode, imode,
3634 expand_expr (node->right->low,
3638 label_rtx (node->right->code_label),
3641 /* See if the value matches what the left hand side
3643 do_jump_if_equal (index,
3644 convert_modes (mode, imode,
3645 expand_expr (node->left->low,
3649 label_rtx (node->left->code_label),
3655 /* Neither node is bounded. First distinguish the two sides;
3656 then emit the code for one side at a time. */
3658 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3660 /* See if the value is on the right. */
3661 emit_cmp_and_jump_insns (index,
3664 expand_expr (node->high, NULL_RTX,
3667 GT, NULL_RTX, mode, unsignedp,
3668 label_rtx (test_label));
3670 /* Value must be on the left.
3671 Handle the left-hand subtree. */
3672 emit_case_nodes (index, node->left, default_label, index_type);
3673 /* If left-hand subtree does nothing,
3675 emit_jump (default_label);
3677 /* Code branches here for the right-hand subtree. */
3678 expand_label (test_label);
3679 emit_case_nodes (index, node->right, default_label, index_type);
/* Single-valued node with only a right child. */
3683 else if (node->right != 0 && node->left == 0)
3685 /* Here we have a right child but no left so we issue conditional
3686 branch to default and process the right child.
3688 Omit the conditional branch to default if we it avoid only one
3689 right child; it costs too much space to save so little time. */
3691 if (node->right->right || node->right->left
3692 || !tree_int_cst_equal (node->right->low, node->right->high))
3694 if (!node_has_low_bound (node, index_type))
3696 emit_cmp_and_jump_insns (index,
3699 expand_expr (node->high, NULL_RTX,
3702 LT, NULL_RTX, mode, unsignedp,
3706 emit_case_nodes (index, node->right, default_label, index_type);
3709 /* We cannot process node->right normally
3710 since we haven't ruled out the numbers less than
3711 this node's value. So handle node->right explicitly. */
3712 do_jump_if_equal (index,
3715 expand_expr (node->right->low, NULL_RTX,
3718 label_rtx (node->right->code_label), unsignedp);
/* Single-valued node with only a left child (mirror of the case above). */
3721 else if (node->right == 0 && node->left != 0)
3723 /* Just one subtree, on the left. */
3724 if (node->left->left || node->left->right
3725 || !tree_int_cst_equal (node->left->low, node->left->high))
3727 if (!node_has_high_bound (node, index_type))
3729 emit_cmp_and_jump_insns (index,
3732 expand_expr (node->high, NULL_RTX,
3735 GT, NULL_RTX, mode, unsignedp,
3739 emit_case_nodes (index, node->left, default_label, index_type);
3742 /* We cannot process node->left normally
3743 since we haven't ruled out the numbers less than
3744 this node's value. So handle node->left explicitly. */
3745 do_jump_if_equal (index,
3748 expand_expr (node->left->low, NULL_RTX,
3751 label_rtx (node->left->code_label), unsignedp);
3756 /* Node is a range. These cases are very similar to those for a single
3757 value, except that we do not start by testing whether this node
3758 is the one to branch to. */
3760 if (node->right != 0 && node->left != 0)
3762 /* Node has subtrees on both sides.
3763 If the right-hand subtree is bounded,
3764 test for it first, since we can go straight there.
3765 Otherwise, we need to make a branch in the control structure,
3766 then handle the two subtrees. */
/* TEST_LABEL stays null unless the right subtree needs its own
   deferred label (the else branch below). */
3767 tree test_label = 0;
3769 if (node_is_bounded (node->right, index_type))
3770 /* Right hand node is fully bounded so we can eliminate any
3771 testing and branch directly to the target code. */
3772 emit_cmp_and_jump_insns (index,
3775 expand_expr (node->high, NULL_RTX,
3778 GT, NULL_RTX, mode, unsignedp,
3779 label_rtx (node->right->code_label));
3782 /* Right hand node requires testing.
3783 Branch to a label where we will handle it later. */
3785 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3786 emit_cmp_and_jump_insns (index,
3789 expand_expr (node->high, NULL_RTX,
3792 GT, NULL_RTX, mode, unsignedp,
3793 label_rtx (test_label));
3796 /* Value belongs to this node or to the left-hand subtree. */
3798 emit_cmp_and_jump_insns (index,
3801 expand_expr (node->low, NULL_RTX,
3804 GE, NULL_RTX, mode, unsignedp,
3805 label_rtx (node->code_label));
3807 /* Handle the left-hand subtree. */
3808 emit_case_nodes (index, node->left, default_label, index_type);
3810 /* If right node had to be handled later, do that now. */
3814 /* If the left-hand subtree fell through,
3815 don't let it fall into the right-hand subtree. */
3816 emit_jump (default_label);
3818 expand_label (test_label);
3819 emit_case_nodes (index, node->right, default_label, index_type);
/* Range node with only a right subtree. */
3823 else if (node->right != 0 && node->left == 0)
3825 /* Deal with values to the left of this node,
3826 if they are possible. */
3827 if (!node_has_low_bound (node, index_type))
3829 emit_cmp_and_jump_insns (index,
3832 expand_expr (node->low, NULL_RTX,
3835 LT, NULL_RTX, mode, unsignedp,
3839 /* Value belongs to this node or to the right-hand subtree. */
3841 emit_cmp_and_jump_insns (index,
3844 expand_expr (node->high, NULL_RTX,
3847 LE, NULL_RTX, mode, unsignedp,
3848 label_rtx (node->code_label));
3850 emit_case_nodes (index, node->right, default_label, index_type);
/* Range node with only a left subtree (mirror of the case above). */
3853 else if (node->right == 0 && node->left != 0)
3855 /* Deal with values to the right of this node,
3856 if they are possible. */
3857 if (!node_has_high_bound (node, index_type))
3859 emit_cmp_and_jump_insns (index,
3862 expand_expr (node->high, NULL_RTX,
3865 GT, NULL_RTX, mode, unsignedp,
3869 /* Value belongs to this node or to the left-hand subtree. */
3871 emit_cmp_and_jump_insns (index,
3874 expand_expr (node->low, NULL_RTX,
3877 GE, NULL_RTX, mode, unsignedp,
3878 label_rtx (node->code_label));
3880 emit_case_nodes (index, node->left, default_label, index_type);
/* Leaf range node: no subtrees at all. */
3885 /* Node has no children so we check low and high bounds to remove
3886 redundant tests. Only one of the bounds can exist,
3887 since otherwise this node is bounded--a case tested already. */
3888 int high_bound = node_has_high_bound (node, index_type);
3889 int low_bound = node_has_low_bound (node, index_type);
3891 if (!high_bound && low_bound)
3893 emit_cmp_and_jump_insns (index,
3896 expand_expr (node->high, NULL_RTX,
3899 GT, NULL_RTX, mode, unsignedp,
3903 else if (!low_bound && high_bound)
3905 emit_cmp_and_jump_insns (index,
3908 expand_expr (node->low, NULL_RTX,
3911 LT, NULL_RTX, mode, unsignedp,
3914 else if (!low_bound && !high_bound)
3916 /* Widen LOW and HIGH to the same width as INDEX. */
3917 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3918 tree low = build1 (CONVERT_EXPR, type, node->low);
3919 tree high = build1 (CONVERT_EXPR, type, node->high);
3920 rtx low_rtx, new_index, new_bound;
3922 /* Instead of doing two branches, emit one unsigned branch for
3923 (index-low) > (high-low). */
/* The subtraction folds the two-sided range test into a single
   unsigned comparison: out-of-range values wrap above high-low. */
3924 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
3925 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3926 NULL_RTX, unsignedp,
3928 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
3932 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3933 mode, 1, default_label);
3936 emit_jump (label_rtx (node->code_label));
3941 #include "gt-stmt.h"