1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
29 #include "coretypes.h"
38 #include "insn-config.h"
41 #include "hard-reg-set.h"
48 #include "langhooks.h"
54 /* Functions and data structures for expanding case statements. */
56 /* Case label structure, used to hold info on labels within case
57 statements. We handle "range" labels; for a single-value label
58 as in C, the high and low limits are the same.
60 We start with a vector of case nodes sorted in ascending order, and
61 the default label as the last element in the vector. Before expanding
62 to RTL, we transform this vector into a list linked via the RIGHT
63 fields in the case_node struct. Nodes with higher case values are
66 Switch statements can be output in three forms. A branch table is
67 used if there are more than a few labels and the labels are dense
68 within the range between the smallest and largest case value. If a
69 branch table is used, no further manipulations are done with the case
72 The alternative to the use of a branch table is to generate a series
73 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
74 and PARENT fields to hold a binary tree. Initially the tree is
75 totally unbalanced, with everything on the right. We balance the tree
76 with nodes on the left having lower case values than the parent
77 and nodes on the right having higher values. We then output the tree
80 For very small, suitable switch statements, we can generate a series
81 of simple bit test and branches instead. */
83 struct case_node GTY(())
/* One case label (or, for range labels, one [low, high] range) of a switch
   statement.  Per the comment above, nodes serve double duty: first as a
   RIGHT-linked list sorted by case value, later as a binary search tree
   using LEFT/RIGHT/PARENT.  GTY(()) marks it for garbage-collector roots.  */
85 struct case_node *left; /* Left son in binary tree */
86 struct case_node *right; /* Right son in binary tree; also node chain */
87 struct case_node *parent; /* Parent of node in binary tree */
88 tree low; /* Lowest index value for this label */
89 tree high; /* Highest index value for this label */
90 tree code_label; /* Label to jump to when node matches */
93 typedef struct case_node case_node;
94 typedef struct case_node *case_node_ptr;
96 /* These are used by estimate_case_costs and balance_case_nodes. */
98 /* This must be a signed type, and non-ANSI compilers lack signed char. */
/* Branch-cost table for case values.  129 entries cover indices produced by
   the +1 bias in COST_TABLE below (so a case value of -1 maps to entry 0).
   NOTE(review): the code that fills the table (estimate_case_costs) is not
   visible in this excerpt -- confirm initialization order there.  */
99 static short cost_table_[129];
100 static int use_cost_table;
101 static int cost_table_initialized;
103 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
/* Biased accessor: shifts I up by one so that -1 is a valid index.  */
105 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
/* Forward declarations for the static helpers defined later in this file:
   asm-statement expansion support, return-value handling, and the
   case/switch expansion machinery (cost estimation, bit tests, balanced
   comparison trees).  */
107 static int n_occurrences (int, const char *);
108 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
109 static void expand_nl_goto_receiver (void);
110 static bool check_operand_nalternatives (tree, tree);
111 static bool check_unique_operand_names (tree, tree);
112 static char *resolve_operand_name_1 (char *, tree, tree);
113 static void expand_null_return_1 (void);
114 static rtx shift_return_value (rtx);
115 static void expand_value_return (rtx);
116 static void do_jump_if_equal (rtx, rtx, rtx, int);
117 static int estimate_case_costs (case_node_ptr);
118 static bool lshift_cheap_p (void);
119 static int case_bit_test_cmp (const void *, const void *);
120 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
121 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
122 static int node_has_low_bound (case_node_ptr, tree);
123 static int node_has_high_bound (case_node_ptr, tree);
124 static int node_is_bounded (case_node_ptr, tree);
125 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
126 static struct case_node *add_case_node (struct case_node *, tree, tree, tree);
129 /* Return the rtx-label that corresponds to a LABEL_DECL,
130 creating it if necessary. */
133 label_rtx (tree label)
135 gcc_assert (TREE_CODE (label) == LABEL_DECL);
/* Lazily create the CODE_LABEL rtx the first time this LABEL_DECL is
   asked for, and cache it in DECL_RTL.  */
137 if (!DECL_RTL_SET_P (label))
139 rtx r = gen_label_rtx ();
140 SET_DECL_RTL (label, r);
/* Labels whose address is taken (FORCED_LABEL) or that are nonlocal
   goto targets must survive even if they appear unreferenced.  */
141 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
142 LABEL_PRESERVE_P (r) = 1;
145 return DECL_RTL (label);
148 /* As above, but also put it on the forced-reference list of the
149 function that contains it. */
151 force_label_rtx (tree label)
153 rtx ref = label_rtx (label);
154 tree function = decl_function_context (label);
/* The label must belong to some function; a label with no context
   would have nowhere to record the forced reference.  */
157 gcc_assert (function);
/* If the label belongs to an outer (containing) function, look up that
   function's per-function data so the reference is recorded there.
   NOTE(review): the declaration of P and the else-branch are elided in
   this excerpt -- confirm against upstream.  */
159 if (function != current_function_decl)
160 p = find_function_data (function);
/* Chain REF onto the owning function's forced-labels list.  */
164 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
165 p->expr->x_forced_labels);
169 /* Add an unconditional jump to LABEL as the next sequential instruction. */
172 emit_jump (rtx label)
/* Flush any deferred stack adjustment first so the jump target sees a
   consistent stack pointer.  */
174 do_pending_stack_adjust ();
175 emit_jump_insn (gen_jump (label));
179 /* Emit code to jump to the address
180 specified by the pointer expression EXP. */
183 expand_computed_goto (tree exp)
/* Evaluate the address expression to an rtx.  */
185 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Ensure the value has pointer mode before jumping through it.  */
187 x = convert_memory_address (Pmode, x);
189 do_pending_stack_adjust ();
190 emit_indirect_jump (x);
193 /* Handle goto statements and the labels that they can go to. */
195 /* Specify the location in the RTL code of a label LABEL,
196 which is a LABEL_DECL tree node.
198 This is used for the kind of label that the user can jump to with a
199 goto statement, and for alternatives of a switch or case statement.
200 RTL labels generated for loops and conditionals don't go through here;
201 they are generated directly at the RTL level, by other functions below.
203 Note that this has nothing to do with defining label *names*.
204 Languages vary in how they do that and what that even means. */
207 expand_label (tree label)
209 rtx label_r = label_rtx (label);
211 do_pending_stack_adjust ();
212 emit_label (label_r);
/* Propagate the source-level name onto the rtl label, if any.  */
213 if (DECL_NAME (label))
214 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal label needs receiver code (to restore frame/stack state on
   entry from another function) and must be recorded so optimizers know
   control can arrive here from a nonlocal goto.  */
216 if (DECL_NONLOCAL (label))
218 expand_nl_goto_receiver ();
219 nonlocal_goto_handler_labels
220 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
221 nonlocal_goto_handler_labels);
/* Address-taken labels go on the forced-labels list so they are kept.  */
224 if (FORCED_LABEL (label))
225 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
227 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
228 maybe_set_first_label_num (label_r);
231 /* Generate RTL code for a `goto' statement with target label LABEL.
232 LABEL should be a LABEL_DECL tree node that was or will later be
233 defined with `expand_label'. */
236 expand_goto (tree label)
#ifdef ENABLE_CHECKING
239 /* Check for a nonlocal goto to a containing function. Should have
240 gotten translated to __builtin_nonlocal_goto. */
241 tree context = decl_function_context (label);
242 gcc_assert (!context || context == current_function_decl);
/* By this point only local gotos remain, so a plain jump suffices.  */
245 emit_jump (label_rtx (label));
248 /* Return the number of times character C occurs in string S. */
/* (Counting loop body elided in this excerpt -- see the header comment
   above: returns how many times C occurs in S.)  */
250 n_occurrences (int c, const char *s)
258 /* Generate RTL for an asm statement (explicit assembler code).
259 STRING is a STRING_CST node containing the assembler code text,
260 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
261 insn is volatile; don't optimize it. */
264 expand_asm (tree string, int vol)
/* Accept either a STRING_CST directly or an ADDR_EXPR wrapping one.  */
268 if (TREE_CODE (string) == ADDR_EXPR)
269 string = TREE_OPERAND (string, 0);
/* A simple asm with no operands becomes a bare ASM_INPUT rtx.  */
271 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
/* VOL nonzero marks the insn volatile so it is not optimized away.  */
273 MEM_VOLATILE_P (body) = vol;
278 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
279 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
280 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
281 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
282 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
283 constraint allows the use of a register operand. And, *IS_INOUT
284 will be true if the operand is read-write, i.e., if it is used as
285 an input as well as an output. If *CONSTRAINT_P is not in
286 canonical form, it will be made canonical. (Note that `+' will be
287 replaced with `=' as part of this process.)
289 Returns TRUE if all went well; FALSE if an error occurred. */
292 parse_output_constraint (const char **constraint_p, int operand_num,
293 int ninputs, int noutputs, bool *allows_mem,
294 bool *allows_reg, bool *is_inout)
296 const char *constraint = *constraint_p;
299 /* Assume the constraint doesn't allow the use of either a register
304 /* Allow the `=' or `+' to not be at the beginning of the string,
305 since it wasn't explicitly documented that way, and there is a
306 large body of code that puts it last. Swap the character to
307 the front, so as not to uglify any place else. */
308 p = strchr (constraint, '=');
/* No `='; fall back to looking for `+' (read-write operand).  */
310 p = strchr (constraint, '+');
312 /* If the string doesn't contain an `=', issue an error
316 error ("output operand constraint lacks `='");
320 /* If the constraint begins with `+', then the operand is both read
321 from and written to. */
322 *is_inout = (*p == '+');
324 /* Canonicalize the output constraint so that it begins with `='. */
/* NOTE(review): this condition tests the pointer IS_INOUT itself (always
   nonzero for a valid call) rather than *is_inout; upstream GCC uses
   `*is_inout' here -- confirm before relying on this short-circuit.  */
325 if (p != constraint || is_inout)
328 size_t c_len = strlen (constraint);
331 warning ("output constraint `%c' for operand %d is not at the beginning",
334 /* Make a copy of the constraint. */
335 buf = alloca (c_len + 1);
336 strcpy (buf, constraint);
337 /* Swap the first character and the `=' or `+'. */
338 buf[p - constraint] = buf[0];
339 /* Make sure the first character is an `='. (Until we do this,
340 it might be a `+'.) */
342 /* Replace the constraint with the canonicalized string. */
343 *constraint_p = ggc_alloc_string (buf, c_len);
344 constraint = *constraint_p;
347 /* Loop through the constraint string. */
/* CONSTRAINT_LEN handles multi-character constraints, so the increment
   may skip more than one char per iteration.  */
348 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
353 error ("operand constraint contains incorrectly positioned '+' or '='");
/* `%' commutes an operand with the next one; meaningless on the last.  */
357 if (operand_num + 1 == ninputs + noutputs)
359 error ("`%%' constraint used with last operand");
364 case 'V': case 'm': case 'o':
368 case '?': case '!': case '*': case '&': case '#':
369 case 'E': case 'F': case 'G': case 'H':
370 case 's': case 'i': case 'n':
371 case 'I': case 'J': case 'K': case 'L': case 'M':
372 case 'N': case 'O': case 'P': case ',':
375 case '0': case '1': case '2': case '3': case '4':
376 case '5': case '6': case '7': case '8': case '9':
378 error ("matching constraint not valid in output operand");
382 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
383 excepting those that expand_call created. So match memory
400 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
402 #ifdef EXTRA_CONSTRAINT_STR
403 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
405 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
409 /* Otherwise we can't assume anything about the nature of
410 the constraint except that it isn't purely registers.
411 Treat it like "g" and hope for the best. */
422 /* Similar, but for input constraints. */
425 parse_input_constraint (const char **constraint_p, int input_num,
426 int ninputs, int noutputs, int ninout,
427 const char * const * constraints,
428 bool *allows_mem, bool *allows_reg)
430 const char *constraint = *constraint_p;
/* Keep the original so we can tell whether we have already chased a
   matching ("0".."9") constraint to its output's string.  */
431 const char *orig_constraint = constraint;
432 size_t c_len = strlen (constraint);
434 bool saw_match = false;
436 /* Assume the constraint doesn't allow the use of either
437 a register or memory. */
441 /* Make sure constraint has neither `=', `+', nor '&'. */
443 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
444 switch (constraint[j])
446 case '+': case '=': case '&':
/* Only diagnose these in the user's own string, not in an output
   constraint we substituted for a matching constraint.  */
447 if (constraint == orig_constraint)
449 error ("input operand constraint contains `%c'", constraint[j]);
455 if (constraint == orig_constraint
456 && input_num + 1 == ninputs - ninout)
458 error ("`%%' constraint used with last operand");
463 case 'V': case 'm': case 'o':
468 case '?': case '!': case '*': case '#':
469 case 'E': case 'F': case 'G': case 'H':
470 case 's': case 'i': case 'n':
471 case 'I': case 'J': case 'K': case 'L': case 'M':
472 case 'N': case 'O': case 'P': case ',':
475 /* Whether or not a numeric constraint allows a register is
476 decided by the matching constraint, and so there is no need
477 to do anything special with them. We must handle them in
478 the default case, so that we don't unnecessarily force
479 operands to memory. */
480 case '0': case '1': case '2': case '3': case '4':
481 case '5': case '6': case '7': case '8': case '9':
/* Matching constraint: parse the referenced output's number.  */
488 match = strtoul (constraint + j, &end, 10);
489 if (match >= (unsigned long) noutputs)
491 error ("matching constraint references invalid operand number");
495 /* Try and find the real constraint for this dup. Only do this
496 if the matching constraint is the only alternative. */
498 && (j == 0 || (j == 1 && constraint[0] == '%')))
500 constraint = constraints[match];
501 *constraint_p = constraint;
502 c_len = strlen (constraint);
504 /* ??? At the end of the loop, we will skip the first part of
505 the matched constraint. This assumes not only that the
506 other constraint is an output constraint, but also that
507 the '=' or '+' come first. */
/* Skip past the digits we just consumed with strtoul.  */
511 j = end - constraint;
512 /* Anticipate increment at end of loop. */
527 if (! ISALPHA (constraint[j]))
529 error ("invalid punctuation `%c' in constraint", constraint[j]);
532 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
535 #ifdef EXTRA_CONSTRAINT_STR
536 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
538 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
542 /* Otherwise we can't assume anything about the nature of
543 the constraint except that it isn't purely registers.
544 Treat it like "g" and hope for the best. */
/* A matching constraint whose target never allows registers is
   suspicious; warn rather than silently forcing to memory.  */
552 if (saw_match && !*allows_reg)
553 warning ("matching constraint does not allow a register");
558 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
559 if it is an operand which must be passed in memory (i.e. an "m"
560 constraint), false otherwise. */
563 asm_op_is_mem_input (tree input, tree expr)
565 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
566 tree outputs = ASM_OUTPUTS (expr);
567 int noutputs = list_length (outputs);
568 const char **constraints
569 = (const char **) alloca ((noutputs) * sizeof (const char *));
571 bool allows_mem, allows_reg;
574 /* Collect output constraints. */
575 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
576 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
578 /* We pass 0 for input_num, ninputs and ninout; they are only used for
579 error checking which will be done at expand time. */
580 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
581 &allows_mem, &allows_reg);
/* Memory-only operand: memory is allowed and registers are not.  */
582 return (!allows_reg && allows_mem);
585 /* Check for overlap between registers marked in CLOBBERED_REGS and
586 anything inappropriate in DECL. Emit error and return TRUE for error,
590 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
592 /* Conflicts between asm-declared register variables and the clobber
593 list are not allowed. */
/* Only hard-register variables (register vars assigned a fixed hard reg
   via an asm specifier) can conflict; pseudos are renamable.  */
594 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
595 && DECL_REGISTER (decl)
596 && REG_P (DECL_RTL (decl))
597 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
599 rtx reg = DECL_RTL (decl);
/* A multi-word value occupies several consecutive hard regs; check
   every one of them against the clobber set.  */
602 for (regno = REGNO (reg);
604 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
606 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
608 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
609 IDENTIFIER_POINTER (DECL_NAME (decl)));
611 /* Reset registerness to stop multiple errors emitted for a
613 DECL_REGISTER (decl) = 0;
620 /* Generate RTL for an asm statement with arguments.
621 STRING is the instruction template.
622 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
623 Each output or input has an expression in the TREE_VALUE and
624 and a tree list in TREE_PURPOSE which in turn contains a constraint
625 name in TREE_VALUE (or NULL_TREE) and a constraint string
627 CLOBBERS is a list of STRING_CST nodes each naming a hard register
628 that is clobbered by this insn.
630 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
631 Some elements of OUTPUTS may be replaced with trees representing temporary
632 values. The caller should copy those temporary values to the originally
635 VOL nonzero means the insn is volatile; don't optimize it. */
638 expand_asm_operands (tree string, tree outputs, tree inputs,
639 tree clobbers, int vol, location_t locus)
641 rtvec argvec, constraintvec;
643 int ninputs = list_length (inputs);
644 int noutputs = list_length (outputs);
647 HARD_REG_SET clobbered_regs;
648 int clobber_conflict_found = 0;
652 /* Vector of RTX's of evaluated output operands. */
653 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
654 int *inout_opnum = alloca (noutputs * sizeof (int));
655 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
656 enum machine_mode *inout_mode
657 = alloca (noutputs * sizeof (enum machine_mode));
658 const char **constraints
659 = alloca ((noutputs + ninputs) * sizeof (const char *));
/* CONCAT generation is suppressed around operand expansion below;
   remember the current setting so it can be restored.  */
660 int old_generating_concat_p = generating_concat_p;
662 /* An ASM with no outputs needs to be treated as volatile, for now. */
/* Bail out early on malformed alternative counts.  */
666 if (! check_operand_nalternatives (outputs, inputs))
/* Replace [name] references in the template with operand numbers.  */
669 string = resolve_asm_operand_names (string, outputs, inputs);
671 /* Collect constraints. */
673 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
674 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
675 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
676 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
678 /* Sometimes we wish to automatically clobber registers across an asm.
679 Case in point is when the i386 backend moved from cc0 to a hard reg --
680 maintaining source-level compatibility means automatically clobbering
681 the flags register. */
682 clobbers = targetm.md_asm_clobbers (clobbers);
684 /* Count the number of meaningful clobbered registers, ignoring what
685 we would ignore later. */
687 CLEAR_HARD_REG_SET (clobbered_regs);
688 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
690 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
/* decode_reg_name: >= 0 is a hard reg, -4 is `memory' (see below).  */
692 i = decode_reg_name (regname);
693 if (i >= 0 || i == -4)
696 error ("unknown register name `%s' in `asm'", regname);
698 /* Mark clobbered registers. */
701 /* Clobbering the PIC register is an error */
702 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
704 error ("PIC register `%s' clobbered in `asm'", regname);
708 SET_HARD_REG_BIT (clobbered_regs, i);
712 /* First pass over inputs and outputs checks validity and sets
713 mark_addressable if needed. */
716 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
718 tree val = TREE_VALUE (tail);
719 tree type = TREE_TYPE (val);
720 const char *constraint;
725 /* If there's an erroneous arg, emit no insn. */
726 if (type == error_mark_node)
729 /* Try to parse the output constraint. If that fails, there's
730 no point in going further. */
731 constraint = constraints[i];
732 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
733 &allows_mem, &allows_reg, &is_inout))
740 && REG_P (DECL_RTL (val))
741 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
742 lang_hooks.mark_addressable (val);
/* Total operand count is limited by the insn-recognition machinery.  */
749 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
751 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
755 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
757 bool allows_reg, allows_mem;
758 const char *constraint;
760 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
761 would get VOIDmode and that could cause a crash in reload. */
762 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
765 constraint = constraints[i + noutputs];
766 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
767 constraints, &allows_mem, &allows_reg))
/* Memory-only inputs must live in addressable storage.  */
770 if (! allows_reg && allows_mem)
771 lang_hooks.mark_addressable (TREE_VALUE (tail));
774 /* Second pass evaluates arguments. */
777 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
779 tree val = TREE_VALUE (tail);
780 tree type = TREE_TYPE (val);
/* Re-parse; constraints[i] was already validated in the first pass.  */
787 ok = parse_output_constraint (&constraints[i], i, ninputs,
788 noutputs, &allows_mem, &allows_reg,
792 /* If an output operand is not a decl or indirect ref and our constraint
793 allows a register, make a temporary to act as an intermediate.
794 Make the asm insn write into that, then our caller will copy it to
795 the real output operand. Likewise for promoted variables. */
797 generating_concat_p = 0;
799 real_output_rtx[i] = NULL_RTX;
800 if ((TREE_CODE (val) == INDIRECT_REF
803 && (allows_mem || REG_P (DECL_RTL (val)))
804 && ! (REG_P (DECL_RTL (val))
805 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
809 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
811 op = validize_mem (op);
813 if (! allows_reg && !MEM_P (op))
814 error ("output number %d not directly addressable", i);
815 if ((! allows_mem && MEM_P (op))
816 || GET_CODE (op) == CONCAT)
/* Asm cannot write this location directly: write a fresh register
   instead, and copy back to the real destination afterwards.  */
818 real_output_rtx[i] = op;
819 op = gen_reg_rtx (GET_MODE (op));
821 emit_move_insn (op, real_output_rtx[i]);
/* Not directly writable at all: use a temporary slot.  */
826 op = assign_temp (type, 0, 0, 1);
827 op = validize_mem (op);
828 TREE_VALUE (tail) = make_tree (type, op);
832 generating_concat_p = old_generating_concat_p;
/* Record in-out ("+") operands so matching inputs get built below.  */
836 inout_mode[ninout] = TYPE_MODE (type);
837 inout_opnum[ninout++] = i;
840 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
841 clobber_conflict_found = 1;
844 /* Make vectors for the expression-rtx, constraint strings,
845 and named operands. */
847 argvec = rtvec_alloc (ninputs);
848 constraintvec = rtvec_alloc (ninputs);
850 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
851 : GET_MODE (output_rtx[0])),
852 TREE_STRING_POINTER (string),
853 empty_string, 0, argvec, constraintvec,
856 MEM_VOLATILE_P (body) = vol;
858 /* Eval the inputs and put them into ARGVEC.
859 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
861 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
863 bool allows_reg, allows_mem;
864 const char *constraint;
869 constraint = constraints[i + noutputs];
870 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
871 constraints, &allows_mem, &allows_reg);
874 generating_concat_p = 0;
876 val = TREE_VALUE (tail);
877 type = TREE_TYPE (val);
878 op = expand_expr (val, NULL_RTX, VOIDmode,
879 (allows_mem && !allows_reg
880 ? EXPAND_MEMORY : EXPAND_NORMAL));
882 /* Never pass a CONCAT to an ASM. */
883 if (GET_CODE (op) == CONCAT)
884 op = force_reg (GET_MODE (op), op);
886 op = validize_mem (op);
/* If the operand still doesn't satisfy its constraint, try to coerce
   it; otherwise warn that reload will probably fail later.  */
888 if (asm_operand_ok (op, constraint) <= 0)
891 op = force_reg (TYPE_MODE (type), op);
892 else if (!allows_mem)
893 warning ("asm operand %d probably doesn't match constraints",
897 /* We won't recognize either volatile memory or memory
898 with a queued address as available a memory_operand
899 at this point. Ignore it: clearly this *is* a memory. */
903 warning ("use of memory input without lvalue in "
904 "asm operand %d is deprecated", i + noutputs);
908 rtx mem = force_const_mem (TYPE_MODE (type), op);
910 op = validize_mem (mem);
912 op = force_reg (TYPE_MODE (type), op);
915 || GET_CODE (op) == SUBREG
916 || GET_CODE (op) == CONCAT)
/* Spill the value to a fresh temporary memory slot so the asm sees
   a plain MEM.  */
918 tree qual_type = build_qualified_type (type,
921 rtx memloc = assign_temp (qual_type, 1, 1, 1);
922 memloc = validize_mem (memloc);
923 emit_move_insn (memloc, op);
929 generating_concat_p = old_generating_concat_p;
930 ASM_OPERANDS_INPUT (body, i) = op;
932 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
933 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
935 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
936 clobber_conflict_found = 1;
939 /* Protect all the operands from the queue now that they have all been
942 generating_concat_p = 0;
944 /* For in-out operands, copy output rtx to input rtx. */
945 for (i = 0; i < ninout; i++)
947 int j = inout_opnum[i];
950 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
/* The synthetic input's constraint is just the matching output's
   number, e.g. "0".  */
953 sprintf (buffer, "%d", j);
954 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
955 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
958 generating_concat_p = old_generating_concat_p;
960 /* Now, for each output, construct an rtx
961 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
962 ARGVEC CONSTRAINTS OPNAMES))
963 If there is more than one, put them inside a PARALLEL. */
965 if (noutputs == 1 && nclobbers == 0)
967 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
968 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
971 else if (noutputs == 0 && nclobbers == 0)
973 /* No output operands: put in a raw ASM_OPERANDS rtx. */
/* General case: wrap SETs and CLOBBERs in a PARALLEL.  */
985 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
987 /* For each output operand, store a SET. */
988 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
991 = gen_rtx_SET (VOIDmode,
994 (GET_MODE (output_rtx[i]),
995 TREE_STRING_POINTER (string),
996 constraints[i], i, argvec, constraintvec,
999 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1002 /* If there are no outputs (but there are some clobbers)
1003 store the bare ASM_OPERANDS into the PARALLEL. */
1006 XVECEXP (body, 0, i++) = obody;
1008 /* Store (clobber REG) for each clobbered register specified. */
1010 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1012 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1013 int j = decode_reg_name (regname);
1018 if (j == -3) /* `cc', which is not a register */
1021 if (j == -4) /* `memory', don't cache memory across asm */
/* A (clobber (mem (scratch))) tells optimizers that arbitrary
   memory may be modified by the asm.  */
1023 XVECEXP (body, 0, i++)
1024 = gen_rtx_CLOBBER (VOIDmode,
1027 gen_rtx_SCRATCH (VOIDmode)));
1031 /* Ignore unknown register, error already signaled. */
1035 /* Use QImode since that's guaranteed to clobber just one reg. */
1036 clobbered_reg = gen_rtx_REG (QImode, j);
1038 /* Do sanity check for overlap between clobbers and respectively
1039 input and outputs that hasn't been handled. Such overlap
1040 should have been detected and reported above. */
1041 if (!clobber_conflict_found)
1045 /* We test the old body (obody) contents to avoid tripping
1046 over the under-construction body. */
1047 for (opno = 0; opno < noutputs; opno++)
1048 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1049 internal_error ("asm clobber conflict with output operand");
1051 for (opno = 0; opno < ninputs - ninout; opno++)
1052 if (reg_overlap_mentioned_p (clobbered_reg,
1053 ASM_OPERANDS_INPUT (obody, opno)))
1054 internal_error ("asm clobber conflict with input operand");
1057 XVECEXP (body, 0, i++)
1058 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1064 /* For any outputs that needed reloading into registers, spill them
1065 back to where they belong. */
1066 for (i = 0; i < noutputs; ++i)
1067 if (real_output_rtx[i])
1068 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1074 expand_asm_expr (tree exp)
/* Simple asm (no operands): hand off to expand_asm directly.  */
1080 if (ASM_INPUT_P (exp))
1082 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1086 outputs = ASM_OUTPUTS (exp);
1087 noutputs = list_length (outputs);
1088 /* o[I] is the place that output number I should be written. */
1089 o = (tree *) alloca (noutputs * sizeof (tree));
1091 /* Record the contents of OUTPUTS before it is modified. */
1092 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1093 o[i] = TREE_VALUE (tail);
1095 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1096 OUTPUTS some trees for where the values were actually stored. */
1097 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1098 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1101 /* Copy all the intermediate outputs into the specified outputs. */
1102 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
/* Only copy when expand_asm_operands substituted a temporary.  */
1104 if (o[i] != TREE_VALUE (tail))
1106 expand_assignment (o[i], TREE_VALUE (tail), 0);
1109 /* Restore the original value so that it's correct the next
1110 time we expand this function. */
1111 TREE_VALUE (tail) = o[i];
1116 /* A subroutine of expand_asm_operands. Check that all operands have
1117 the same number of alternatives. Return true if so. */
1120 check_operand_nalternatives (tree outputs, tree inputs)
1122 if (outputs || inputs)
/* Use the first operand's comma count as the reference number of
   alternatives; every other operand must match it.  */
1124 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1126 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1129 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1131 error ("too many alternatives in `asm'");
1138 const char *constraint
1139 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1141 if (n_occurrences (',', constraint) != nalternatives)
1143 error ("operand constraints for `asm' differ in number of alternatives");
/* Walk outputs first, then continue into the inputs chain.
   NOTE(review): the assignment of `next' is elided in this excerpt.  */
1147 if (TREE_CHAIN (tmp))
1148 tmp = TREE_CHAIN (tmp);
1150 tmp = next, next = 0;
1157 /* A subroutine of expand_asm_operands. Check that all operand names
1158 are unique. Return true if so. We rely on the fact that these names
1159 are identifiers, and so have been canonicalized by get_identifier,
1160 so all we need are pointer comparisons. */
1163 check_unique_operand_names (tree outputs, tree inputs)
/* Compare every named output against all later outputs.  */
1167 for (i = outputs; i ; i = TREE_CHAIN (i))
1169 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1173 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1174 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Compare every named input against later inputs AND all outputs.  */
1178 for (i = inputs; i ; i = TREE_CHAIN (i))
1180 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1184 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1185 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1187 for (j = outputs; j ; j = TREE_CHAIN (j))
1188 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Shared failure exit: report the duplicate name.  */
1195 error ("duplicate asm operand name '%s'",
1196 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1200 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1201 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1202 STRING and in the constraints to those numbers. */
1205 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1212 check_unique_operand_names (outputs, inputs);
1214 /* Substitute [<name>] in input constraint strings. There should be no
1215 named operands in output constraints. */
1216 for (t = inputs; t ; t = TREE_CHAIN (t))
1218 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1219 if (strchr (c, '[') != NULL)
/* Work on a mutable copy; resolve_operand_name_1 edits in place.  */
1221 p = buffer = xstrdup (c);
1222 while ((p = strchr (p, '[')) != NULL)
1223 p = resolve_operand_name_1 (p, outputs, inputs);
1224 TREE_VALUE (TREE_PURPOSE (t))
1225 = build_string (strlen (buffer), buffer);
1230 /* Now check for any needed substitutions in the template. */
1231 c = TREE_STRING_POINTER (string);
1232 while ((c = strchr (c, '%')) != NULL)
/* %x[name] form: a letter modifier followed by a bracketed name.  */
1236 else if (ISALPHA (c[1]) && c[2] == '[')
1247 /* OK, we need to make a copy so we can perform the substitutions.
1248 Assume that we will not need extra space--we get to remove '['
1249 and ']', which means we cannot have a problem until we have more
1250 than 999 operands. */
1251 buffer = xstrdup (TREE_STRING_POINTER (string));
1252 p = buffer + (c - TREE_STRING_POINTER (string));
1254 while ((p = strchr (p, '%')) != NULL)
1258 else if (ISALPHA (p[1]) && p[2] == '[')
1266 p = resolve_operand_name_1 (p, outputs, inputs);
1269 string = build_string (strlen (buffer), buffer);
1276 /* A subroutine of resolve_operand_names. P points to the '[' for a
1277 potential named operand of the form [<name>]. In place, replace
1278 the name and brackets with a number. Return a pointer to the
1279 balance of the string after substitution. */
1282 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1289 /* Collect the operand name. */
1290 q = strchr (p, ']');
/* No closing bracket: report and skip to end of string so the caller's
   scan loop terminates. */
1293 error ("missing close brace for named operand");
1294 return strchr (p, '\0');
/* NOTE(review): the computation of `len` (the name length, presumably
   q - p - 1) is in lines omitted from this excerpt — confirm. */
1298 /* Resolve the name to a number. */
/* Operand numbering counts all outputs first, then inputs: OP keeps
   incrementing across both loops. */
1299 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1301 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1304 const char *c = TREE_STRING_POINTER (name);
1305 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1309 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1311 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1314 const char *c = TREE_STRING_POINTER (name);
1315 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1321 error ("undefined named operand '%s'", p + 1);
1325 /* Replace the name with the number. Unfortunately, not all libraries
1326 get the return value of sprintf correct, so search for the end of the
1327 generated string by hand. */
1328 sprintf (p, "%d", op);
1329 p = strchr (p, '\0');
1331 /* Verify the no extra buffer space assumption. */
1332 gcc_assert (p <= q);
1334 /* Shift the rest of the buffer down to fill the gap. */
1335 memmove (p, q + 1, strlen (q + 1) + 1);
1340 /* Generate RTL to evaluate the expression EXP. */
/* Expands an expression statement: the value is computed for its side
   effects and then discarded. */
1343 expand_expr_stmt (tree exp)
1348 value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1349 type = TREE_TYPE (exp);
1351 /* If all we do is reference a volatile value in memory,
1352 copy it to a register to be sure it is actually touched. */
1353 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1355 if (TYPE_MODE (type) == VOIDmode)
1357 else if (TYPE_MODE (type) != BLKmode)
1358 value = copy_to_reg (value);
/* BLKmode volatile: can't copy to a register, so emit a self-comparison
   jump purely to force a read of the memory. */
1361 rtx lab = gen_label_rtx ();
1363 /* Compare the value with itself to reference it. */
1364 emit_cmp_and_jump_insns (value, value, EQ,
1365 expand_expr (TYPE_SIZE (type),
1366 NULL_RTX, VOIDmode, 0),
1372 /* Free any temporaries used to evaluate this expression. */
1376 /* Warn if EXP contains any computations whose results are not used.
1377 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1378 (potential) location of the expression. */
1381 warn_if_unused_value (tree exp, location_t locus)
/* TREE_USED means some part of the program consumed the value. */
1384 if (TREE_USED (exp))
1387 /* Don't warn about void constructs. This includes casting to void,
1388 void function calls, and statement expressions with a final cast
1390 if (VOID_TYPE_P (TREE_TYPE (exp)))
1393 if (EXPR_HAS_LOCATION (exp))
1394 locus = EXPR_LOCATION (exp);
1396 switch (TREE_CODE (exp))
/* Increments/decrements exist for their side effect: never warn. */
1398 case PREINCREMENT_EXPR:
1399 case POSTINCREMENT_EXPR:
1400 case PREDECREMENT_EXPR:
1401 case POSTDECREMENT_EXPR:
1406 case TRY_CATCH_EXPR:
1407 case WITH_CLEANUP_EXPR:
1412 /* For a binding, warn if no side effect within it. */
1413 exp = BIND_EXPR_BODY (exp);
1417 exp = TREE_OPERAND (exp, 0);
1420 case TRUTH_ORIF_EXPR:
1421 case TRUTH_ANDIF_EXPR:
1422 /* In && or ||, warn if 2nd operand has no side effect. */
1423 exp = TREE_OPERAND (exp, 1);
/* COMPOUND_EXPR (comma operator): check the first operand, then recurse
   into the second unless it is a constant placed there deliberately. */
1427 if (TREE_NO_WARNING (exp))
1429 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1431 /* Let people do `(foo (), 0)' without a warning. */
1432 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1434 exp = TREE_OPERAND (exp, 1);
1439 case NON_LVALUE_EXPR:
1440 /* Don't warn about conversions not explicit in the user's program. */
1441 if (TREE_NO_WARNING (exp))
1443 /* Assignment to a cast usually results in a cast of a modify.
1444 Don't complain about that. There can be an arbitrary number of
1445 casts before the modify, so we must loop until we find the first
1446 non-cast expression and then test to see if that is a modify. */
1448 tree tem = TREE_OPERAND (exp, 0);
1450 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1451 tem = TREE_OPERAND (tem, 0);
1453 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1454 || TREE_CODE (tem) == CALL_EXPR)
1460 /* Don't warn about automatic dereferencing of references, since
1461 the user cannot control it. */
1462 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1464 exp = TREE_OPERAND (exp, 0);
1470 /* Referencing a volatile value is a side effect, so don't warn. */
1472 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1473 && TREE_THIS_VOLATILE (exp))
1476 /* If this is an expression which has no operands, there is no value
1477 to be unused. There are no such language-independent codes,
1478 but front ends may define such. */
1479 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1480 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1484 /* If this is an expression with side effects, don't warn. */
1485 if (TREE_SIDE_EFFECTS (exp))
1488 warning ("%Hvalue computed is not used", &locus);
1494 /* Generate RTL to return from the current function, with no value.
1495 (That is, we do not do anything about returning any value.) */
1498 expand_null_return (void)
1500 /* If this function was declared to return a value, but we
1501 didn't, clobber the return registers so that they are not
1502 propagated live to the rest of the function. */
1503 clobber_return_register ();
/* Shared epilogue-jump logic lives in expand_null_return_1. */
1505 expand_null_return_1 ();
1508 /* Generate RTL to return directly from the current function.
1509 (That is, we bypass any return value.) */
1512 expand_naked_return (void)
1516 clear_pending_stack_adjust ();
1517 do_pending_stack_adjust ();
/* Lazily create the function-wide naked-return label the first time a
   naked return is expanded; all naked returns jump to the same label. */
1519 end_label = naked_return_label;
1521 end_label = naked_return_label = gen_label_rtx ();
1523 emit_jump (end_label);
1526 /* If the current function returns values in the most significant part
1527 of a register, shift return value VAL appropriately. The mode of
1528 the function's return type is known not to be BLKmode. */
/* NOTE(review): presumably returns VAL (shifted when return_in_msb is
   true, unchanged otherwise) — the return statement is not visible in
   this excerpt. */
1531 shift_return_value (rtx val)
1535 type = TREE_TYPE (DECL_RESULT (current_function_decl));
1536 if (targetm.calls.return_in_msb (type))
1539 HOST_WIDE_INT shift;
1541 target = DECL_RTL (DECL_RESULT (current_function_decl));
/* Shift amount = unused low-order bits: register width minus the
   value's size in bits. */
1542 shift = (GET_MODE_BITSIZE (GET_MODE (target))
1543 - BITS_PER_UNIT * int_size_in_bytes (type));
1545 val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1546 gen_lowpart (GET_MODE (target), val),
1547 build_int_cst (NULL_TREE, shift), target, 1);
1553 /* Generate RTL to return from the current function, with value VAL. */
1556 expand_value_return (rtx val)
1558 /* Copy the value to the return location
1559 unless it's already there. */
1561 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1562 if (return_reg != val)
1564 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* If the ABI promotes function return values (e.g. widening small
   integers), convert VAL to the promoted mode first. */
1565 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1567 int unsignedp = TYPE_UNSIGNED (type);
1568 enum machine_mode old_mode
1569 = DECL_MODE (DECL_RESULT (current_function_decl));
1570 enum machine_mode mode
1571 = promote_mode (type, old_mode, &unsignedp, 1);
1573 if (mode != old_mode)
1574 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value is split across several
   registers; use the group-load helper instead of a plain move. */
1576 if (GET_CODE (return_reg) == PARALLEL)
1577 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1579 emit_move_insn (return_reg, val);
1582 expand_null_return_1 ();
1585 /* Output a return with no value. */
/* Common tail for all return expansion paths: flush pending stack
   adjustments and jump to the (lazily created) function return label. */
1588 expand_null_return_1 (void)
1592 clear_pending_stack_adjust ();
1593 do_pending_stack_adjust ();
1595 end_label = return_label;
1597 end_label = return_label = gen_label_rtx ();
1598 emit_jump (end_label);
1601 /* Generate RTL to evaluate the expression RETVAL and return it
1602 from the current function. */
1605 expand_return (tree retval)
1611 /* If function wants no value, give it none. */
1612 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects before returning. */
1614 expand_expr (retval, NULL_RTX, VOIDmode, 0);
1615 expand_null_return ();
1619 if (retval == error_mark_node)
1621 /* Treat this like a return of no value from a function that
1623 expand_null_return ();
/* `return x;` is gimplified to `return <retval> = x;`, so peel off the
   MODIFY/INIT wrapper to get the actual value expression. */
1626 else if ((TREE_CODE (retval) == MODIFY_EXPR
1627 || TREE_CODE (retval) == INIT_EXPR)
1628 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1629 retval_rhs = TREE_OPERAND (retval, 1);
1631 retval_rhs = retval;
1633 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1635 /* If we are returning the RESULT_DECL, then the value has already
1636 been stored into it, so we don't have to do anything special. */
1637 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1638 expand_value_return (result_rtl);
1640 /* If the result is an aggregate that is being returned in one (or more)
1641 registers, load the registers here. The compiler currently can't handle
1642 copying a BLKmode value into registers. We could put this code in a
1643 more general area (for use by everyone instead of just function
1644 call/return), but until this feature is generally usable it is kept here
1645 (and in expand_call). */
1647 else if (retval_rhs != 0
1648 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1649 && REG_P (result_rtl))
1652 unsigned HOST_WIDE_INT bitpos, xbitpos;
1653 unsigned HOST_WIDE_INT padding_correction = 0;
1654 unsigned HOST_WIDE_INT bytes
1655 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1656 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1657 unsigned int bitsize
1658 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1659 rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1660 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1661 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
1662 enum machine_mode tmpmode, result_reg_mode;
1666 expand_null_return ();
1670 /* If the structure doesn't take up a whole number of words, see
1671 whether the register value should be padded on the left or on
1672 the right. Set PADDING_CORRECTION to the number of padding
1673 bits needed on the left side.
1675 In most ABIs, the structure will be returned at the least end of
1676 the register, which translates to right padding on little-endian
1677 targets and left padding on big-endian targets. The opposite
1678 holds if the structure is returned at the most significant
1679 end of the register. */
1680 if (bytes % UNITS_PER_WORD != 0
1681 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1683 : BYTES_BIG_ENDIAN))
1684 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1687 /* Copy the structure BITSIZE bits at a time. */
1688 for (bitpos = 0, xbitpos = padding_correction;
1689 bitpos < bytes * BITS_PER_UNIT;
1690 bitpos += bitsize, xbitpos += bitsize)
1692 /* We need a new destination pseudo each time xbitpos is
1693 on a word boundary and when xbitpos == padding_correction
1694 (the first time through). */
1695 if (xbitpos % BITS_PER_WORD == 0
1696 || xbitpos == padding_correction)
1698 /* Generate an appropriate register. */
1699 dst = gen_reg_rtx (word_mode);
1700 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1702 /* Clear the destination before we move anything into it. */
1703 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1706 /* We need a new source operand each time bitpos is on a word
1708 if (bitpos % BITS_PER_WORD == 0)
1709 src = operand_subword_force (result_val,
1710 bitpos / BITS_PER_WORD,
1713 /* Use bitpos for the source extraction (left justified) and
1714 xbitpos for the destination store (right justified). */
1715 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1716 extract_bit_field (src, bitsize,
1717 bitpos % BITS_PER_WORD, 1,
1718 NULL_RTX, word_mode, word_mode));
1721 tmpmode = GET_MODE (result_rtl);
1722 if (tmpmode == BLKmode)
1724 /* Find the smallest integer mode large enough to hold the
1725 entire structure and use that mode instead of BLKmode
1726 on the USE insn for the return register. */
1727 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1728 tmpmode != VOIDmode;
1729 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1730 /* Have we found a large enough mode? */
1731 if (GET_MODE_SIZE (tmpmode) >= bytes)
1734 /* A suitable mode should have been found. */
1735 gcc_assert (tmpmode != VOIDmode);
1737 PUT_MODE (result_rtl, tmpmode);
/* Assemble the per-word pseudos into one wide pseudo; use at least
   word_mode so operand_subword below is valid. */
1740 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1741 result_reg_mode = word_mode;
1743 result_reg_mode = tmpmode;
1744 result_reg = gen_reg_rtx (result_reg_mode);
1746 for (i = 0; i < n_regs; i++)
1747 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1750 if (tmpmode != result_reg_mode)
1751 result_reg = gen_lowpart (tmpmode, result_reg);
1753 expand_value_return (result_reg);
1755 else if (retval_rhs != 0
1756 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1757 && (REG_P (result_rtl)
1758 || (GET_CODE (result_rtl) == PARALLEL)))
1760 /* Calculate the return value into a temporary (usually a pseudo
1762 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Const-qualify the temp so nothing tries to write it afterwards. */
1763 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1765 val = assign_temp (nt, 0, 0, 1);
1766 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
1767 val = force_not_mem (val);
1768 /* Return the calculated value. */
1769 expand_value_return (shift_return_value (val));
1773 /* No hard reg used; calculate value into hard return reg. */
1774 expand_expr (retval, const0_rtx, VOIDmode, 0);
1775 expand_value_return (result_rtl);
1779 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
1780 in question represents the outermost pair of curly braces (i.e. the "body
1781 block") of a function or method.
1783 For any BLOCK node representing a "body block" of a function or method, the
1784 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
1785 represents the outermost (function) scope for the function or method (i.e.
1786 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
1787 *that* node in turn will point to the relevant FUNCTION_DECL node. */
1790 is_body_block (tree stmt)
/* Some front ends do not use body blocks at all; bail out early. */
1792 if (lang_hooks.no_body_blocks)
1795 if (TREE_CODE (stmt) == BLOCK)
/* Walk two levels up: body block -> function scope block -> FUNCTION_DECL. */
1797 tree parent = BLOCK_SUPERCONTEXT (stmt);
1799 if (parent && TREE_CODE (parent) == BLOCK)
1801 tree grandparent = BLOCK_SUPERCONTEXT (parent);
1803 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
1811 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1814 expand_nl_goto_receiver (void)
1816 /* Clobber the FP when we get here, so we have to make sure it's
1817 marked as used by this function. */
1818 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
1820 /* Mark the static chain as clobbered here so life information
1821 doesn't get messed up for it. */
1822 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
1824 #ifdef HAVE_nonlocal_goto
1825 if (! HAVE_nonlocal_goto)
1827 /* First adjust our frame pointer to its actual value. It was
1828 previously set to the start of the virtual area corresponding to
1829 the stacked variables when we branched here and now needs to be
1830 adjusted to the actual hardware fp value.
1832 Assignments are to virtual registers are converted by
1833 instantiate_virtual_regs into the corresponding assignment
1834 to the underlying register (fp in this case) that makes
1835 the original assignment true.
1836 So the following insn will actually be
1837 decrementing fp by STARTING_FRAME_OFFSET. */
1838 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1840 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1841 if (fixed_regs[ARG_POINTER_REGNUM])
1843 #ifdef ELIMINABLE_REGS
1844 /* If the argument pointer can be eliminated in favor of the
1845 frame pointer, we don't need to restore it. We assume here
1846 that if such an elimination is present, it can always be used.
1847 This is the case on all known machines; if we don't make this
1848 assumption, we do unnecessary saving on many machines. */
1849 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1852 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1853 if (elim_regs[i].from == ARG_POINTER_REGNUM
1854 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* Only restore the arg pointer if no arg->frame-pointer elimination
   was found above (loop ran to completion). */
1857 if (i == ARRAY_SIZE (elim_regs))
1860 /* Now restore our arg pointer from the address at which it
1861 was saved in our stack frame. */
1862 emit_move_insn (virtual_incoming_args_rtx,
1863 copy_to_reg (get_arg_pointer_save_area (cfun)));
1868 #ifdef HAVE_nonlocal_goto_receiver
1869 if (HAVE_nonlocal_goto_receiver)
1870 emit_insn (gen_nonlocal_goto_receiver ());
1873 /* @@@ This is a kludge. Not all machine descriptions define a blockage
1874 insn, but we must not allow the code we just generated to be reordered
1875 by scheduling. Specifically, the update of the frame pointer must
1876 happen immediately, not later. So emit an ASM_INPUT to act as blockage
1878 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
1881 /* Generate RTL for the automatic variable declaration DECL.
1882 (Other kinds of declarations are simply ignored if seen here.) */
1885 expand_decl (tree decl)
1889 type = TREE_TYPE (decl);
1891 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1892 type in case this node is used in a reference. */
1893 if (TREE_CODE (decl) == CONST_DECL)
1895 DECL_MODE (decl) = TYPE_MODE (type);
1896 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1897 DECL_SIZE (decl) = TYPE_SIZE (type);
1898 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1902 /* Otherwise, only automatic variables need any expansion done. Static and
1903 external variables, and external functions, will be handled by
1904 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1905 nothing. PARM_DECLs are handled in `assign_parms'. */
1906 if (TREE_CODE (decl) != VAR_DECL)
1909 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1912 /* Create the RTL representation for the variable. */
1914 if (type == error_mark_node)
/* Erroneous type: give it a harmless dummy MEM so later references
   don't crash. */
1915 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1917 else if (DECL_SIZE (decl) == 0)
1918 /* Variable with incomplete type. */
1921 if (DECL_INITIAL (decl) == 0)
1922 /* Error message was already done; now avoid a crash. */
1923 x = gen_rtx_MEM (BLKmode, const0_rtx);
1925 /* An initializer is going to decide the size of this array.
1926 Until we know the size, represent its address with a reg. */
1927 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1929 set_mem_attributes (x, decl, 1);
1930 SET_DECL_RTL (decl, x);
1932 else if (use_register_for_decl (decl))
1934 /* Automatic variable that can go in a register. */
1935 int unsignedp = TYPE_UNSIGNED (type);
1936 enum machine_mode reg_mode
1937 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1939 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1941 /* Note if the object is a user variable. */
1942 if (!DECL_ARTIFICIAL (decl))
1944 mark_user_reg (DECL_RTL (decl));
1946 /* Trust user variables which have a pointer type to really
1947 be pointers. Do not trust compiler generated temporaries
1948 as our type system is totally busted as it relates to
1949 pointer arithmetic which translates into lots of compiler
1950 generated objects with pointer types, but which are not really
1952 if (POINTER_TYPE_P (type))
1953 mark_reg_pointer (DECL_RTL (decl),
1954 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
/* Fixed-size stack variable, unless stack checking forces the
   dynamic path for very large objects. */
1958 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
1959 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
1960 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
1961 STACK_CHECK_MAX_VAR_SIZE)))
1963 /* Variable of fixed size that goes on the stack. */
1968 /* If we previously made RTL for this decl, it must be an array
1969 whose size was determined by the initializer.
1970 The old address was a register; set that register now
1971 to the proper address. */
1972 if (DECL_RTL_SET_P (decl))
1974 gcc_assert (MEM_P (DECL_RTL (decl)));
1975 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1976 oldaddr = XEXP (DECL_RTL (decl), 0);
1979 /* Set alignment we actually gave this decl. */
1980 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1981 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1982 DECL_USER_ALIGN (decl) = 0;
1984 x = assign_temp (decl, 1, 1, 1);
1985 set_mem_attributes (x, decl, 1);
1986 SET_DECL_RTL (decl, x);
/* Patch the placeholder address register (from the incomplete-type
   case above) to point at the real slot. */
1990 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1991 if (addr != oldaddr)
1992 emit_move_insn (oldaddr, addr);
1996 /* Dynamic-size object: must push space on the stack. */
1998 rtx address, size, x;
2000 /* Record the stack pointer on entry to block, if have
2001 not already done so. */
2002 do_pending_stack_adjust ();
2004 /* Compute the variable's size, in bytes. This will expand any
2005 needed SAVE_EXPRs for the first time. */
2006 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2009 /* Allocate space on the stack for the variable. Note that
2010 DECL_ALIGN says how the variable is to be aligned and we
2011 cannot use it to conclude anything about the alignment of
2013 address = allocate_dynamic_stack_space (size, NULL_RTX,
2014 TYPE_ALIGN (TREE_TYPE (decl)));
2016 /* Reference the variable indirect through that rtx. */
2017 x = gen_rtx_MEM (DECL_MODE (decl), address);
2018 set_mem_attributes (x, decl, 1);
2019 SET_DECL_RTL (decl, x);
2022 /* Indicate the alignment we actually gave this variable. */
2023 #ifdef STACK_BOUNDARY
2024 DECL_ALIGN (decl) = STACK_BOUNDARY;
2026 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2028 DECL_USER_ALIGN (decl) = 0;
2032 /* Emit code to save the current value of stack. */
/* NOTE(review): presumably returns RET (the rtx holding the saved stack
   pointer) — declaration and return are outside this excerpt. */
2034 expand_stack_save (void)
2038 do_pending_stack_adjust ();
2039 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2043 /* Emit code to restore the current value of stack. */
/* VAR is the variable whose DECL_RTL holds the previously saved
   stack pointer (from expand_stack_save). */
2045 expand_stack_restore (tree var)
2047 rtx sa = DECL_RTL (var);
2049 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2052 /* Emit code to perform the initialization of a declaration DECL. */
2055 expand_decl_init (tree decl)
/* Remember TREE_USED so the initialization itself doesn't mark the
   variable as used (restored below). */
2057 int was_used = TREE_USED (decl);
2059 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2060 for static decls. */
2061 if (TREE_CODE (decl) == CONST_DECL
2062 || TREE_STATIC (decl))
2065 /* Compute and store the initial value now. */
2069 if (DECL_INITIAL (decl) == error_mark_node)
/* The initializer was erroneous; store zero for scalar-ish types so
   the variable at least has a defined value and we avoid crashes. */
2071 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2073 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2074 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2075 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2078 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2080 emit_line_note (DECL_SOURCE_LOCATION (decl));
2081 expand_assignment (decl, DECL_INITIAL (decl), 0);
2084 /* Don't let the initialization count as "using" the variable. */
2085 TREE_USED (decl) = was_used;
2087 /* Free any temporaries we made while initializing the decl. */
2088 preserve_temp_slots (NULL_RTX);
2094 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2095 DECL_ELTS is the list of elements that belong to DECL's type.
2096 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2099 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2105 /* If any of the elements are addressable, so is the entire union. */
2106 for (t = decl_elts; t; t = TREE_CHAIN (t))
2107 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2109 TREE_ADDRESSABLE (decl) = 1;
2114 x = DECL_RTL (decl);
2116 /* Go through the elements, assigning RTL to each. */
/* All members share the union's storage X, viewed in each member's
   own machine mode. */
2117 for (t = decl_elts; t; t = TREE_CHAIN (t))
2119 tree decl_elt = TREE_VALUE (t);
2120 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2123 /* If any of the elements are addressable, so is the entire
2125 if (TREE_USED (decl_elt))
2126 TREE_USED (decl) = 1;
2128 /* Propagate the union's alignment to the elements. */
2129 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2130 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2132 /* If the element has BLKmode and the union doesn't, the union is
2133 aligned such that the element doesn't need to have BLKmode, so
2134 change the element's mode to the appropriate one for its size. */
2135 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2136 DECL_MODE (decl_elt) = mode
2137 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2139 if (mode == GET_MODE (x))
2142 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2143 instead create a new MEM rtx with the proper mode. */
2144 decl_rtl = adjust_address_nv (x, mode, 0);
2147 gcc_assert (REG_P (x));
2148 decl_rtl = gen_lowpart_SUBREG (mode, x);
2150 SET_DECL_RTL (decl_elt, decl_rtl);
2154 /* Do the insertion of a case label into case_list. The labels are
2155 fed to us in descending order from the sorted vector of case labels used
2156 in the tree part of the middle end. So the list we construct is
2157 sorted in ascending order. */
/* NOTE(review): presumably returns the new list head R — the return and
   several field assignments are outside this excerpt. */
2160 add_case_node (struct case_node *head, tree low, tree high, tree label)
2162 struct case_node *r;
2164 /* If there's no HIGH value, then this is not a case range; it's
2165 just a simple case label. But that's just a degenerate case
2167 If the bounds are equal, turn this into the one-value case. */
2168 if (!high || tree_int_cst_equal (low, high))
2171 /* Add this label to the chain. */
2172 r = ggc_alloc (sizeof (struct case_node));
2175 r->code_label = label;
2176 r->parent = r->left = NULL;
2181 /* Maximum number of case bit tests. */
2182 #define MAX_CASE_BIT_TESTS 3
2184 /* By default, enable case bit tests on targets with ashlsi3. */
2185 #ifndef CASE_USE_BIT_TESTS
2186 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
2187 != CODE_FOR_nothing)
2191 /* A case_bit_test represents a set of case nodes that may be
2192 selected from using a bit-wise comparison. HI and LO hold
2193 the integer to be tested against, LABEL contains the label
2194 to jump to upon success and BITS counts the number of case
2195 nodes handled by this test, typically the number of bits
2198 struct case_bit_test
/* NOTE(review): the struct members (hi, lo, label, bits) are in lines
   omitted from this excerpt; see uses in emit_case_bit_tests below. */
2206 /* Determine whether "1 << x" is relatively cheap in word_mode. */
/* Result is computed once and cached in function-local statics; the
   probe uses a fake high-numbered hard register so rtx_cost sees a
   register operand. */
2209 bool lshift_cheap_p (void)
2211 static bool init = false;
2212 static bool cheap = true;
2216 rtx reg = gen_rtx_REG (word_mode, 10000);
2217 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2218 cheap = cost < COSTS_N_INSNS (3);
2225 /* Comparison function for qsort to order bit tests by decreasing
2226 number of case nodes, i.e. the node with the most cases gets
2230 case_bit_test_cmp (const void *p1, const void *p2)
2232 const struct case_bit_test *d1 = p1;
2233 const struct case_bit_test *d2 = p2;
/* Descending sort: larger bit counts compare as "smaller". */
2235 return d2->bits - d1->bits;
2238 /* Expand a switch statement by a short sequence of bit-wise
2239 comparisons. "switch(x)" is effectively converted into
2240 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2243 INDEX_EXPR is the value being switched on, which is of
2244 type INDEX_TYPE. MINVAL is the lowest case value of in
2245 the case nodes, of INDEX_TYPE type, and RANGE is highest
2246 value minus MINVAL, also of type INDEX_TYPE. NODES is
2247 the set of case nodes, and DEFAULT_LABEL is the label to
2248 branch to should none of the cases match.
2250 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2254 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2255 tree range, case_node_ptr nodes, rtx default_label)
2257 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2258 enum machine_mode mode;
2259 rtx expr, index, label;
2260 unsigned int i,j,lo,hi;
2261 struct case_node *n;
/* Group the case nodes by target label: one case_bit_test (one mask)
   per distinct label. */
2265 for (n = nodes; n; n = n->right)
2267 label = label_rtx (n->code_label);
2268 for (i = 0; i < count; i++)
2269 if (label == test[i].label)
2274 gcc_assert (count < MAX_CASE_BIT_TESTS);
2277 test[i].label = label;
/* Set the mask bit for every value in [n->low, n->high], rebased
   to MINVAL. Bits >= HOST_BITS_PER_WIDE_INT land in the HI half. */
2284 lo = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2285 n->low, minval)), 1);
2286 hi = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2287 n->high, minval)), 1);
2288 for (j = lo; j <= hi; j++)
2289 if (j >= HOST_BITS_PER_WIDE_INT)
/* NOTE(review): shift uses (j - HOST_BITS_PER_INT) while the guard
   tests HOST_BITS_PER_WIDE_INT — looks inconsistent; confirm against
   the upstream source before relying on the HI-half masks. */
2290 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2292 test[i].lo |= (HOST_WIDE_INT) 1 << j;
/* Try the most-populated masks first. */
2295 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2297 index_expr = fold (build2 (MINUS_EXPR, index_type,
2298 convert (index_type, index_expr),
2299 convert (index_type, minval)));
2300 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2301 do_pending_stack_adjust ();
/* Out-of-range index goes straight to the default label. */
2303 mode = TYPE_MODE (index_type);
2304 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
2305 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
/* Compute 1 << index once, then AND it against each label's mask. */
2308 index = convert_to_mode (word_mode, index, 0);
2309 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2310 index, NULL_RTX, 1, OPTAB_WIDEN);
2312 for (i = 0; i < count; i++)
2314 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2315 expr = expand_binop (word_mode, and_optab, index, expr,
2316 NULL_RTX, 1, OPTAB_WIDEN);
2317 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2318 word_mode, 1, test[i].label);
2321 emit_jump (default_label);
/* Fallback definitions so expand_case can test these feature macros
   unconditionally on targets whose machine description lacks the
   casesi/tablejump patterns. */
2325 #define HAVE_casesi 0
2328 #ifndef HAVE_tablejump
2329 #define HAVE_tablejump 0
2332 /* Terminate a case (Pascal) or switch (C) statement
2333 in which ORIG_INDEX is the expression to be tested.
2334 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2335 type as given in the source before any compiler conversions.
2336 Generate the code to test it and jump to the right place. */
2339 expand_case (tree exp)
2341 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2342 rtx default_label = 0;
2343 struct case_node *n, *m;
2344 unsigned int count, uniq;
2350 rtx before_case, end, lab;
2352 tree vec = SWITCH_LABELS (exp);
2353 tree orig_type = TREE_TYPE (exp);
2354 tree index_expr = SWITCH_COND (exp);
2355 tree index_type = TREE_TYPE (index_expr);
2356 int unsignedp = TYPE_UNSIGNED (index_type);
2358 /* The insn after which the case dispatch should finally
2359 be emitted. Zero for a dummy. */
2362 /* A list of case labels; it is first built as a list and it may then
2363 be rearranged into a nearly balanced binary tree. */
2364 struct case_node *case_list = 0;
2366 /* Label to jump to if no case matches. */
2367 tree default_label_decl = 0;
2369 /* The switch body is lowered in gimplify.c, we should never have
2370 switches with a non-NULL SWITCH_BODY here. */
2371 gcc_assert (!SWITCH_BODY (exp));
2372 gcc_assert (SWITCH_LABELS (exp));
2374 for (i = TREE_VEC_LENGTH (vec); --i >= 0; )
2376 tree elt = TREE_VEC_ELT (vec, i);
2378 /* Handle default labels specially. */
2379 if (!CASE_HIGH (elt) && !CASE_LOW (elt))
2381 gcc_assert (!default_label_decl);
2382 default_label_decl = CASE_LABEL (elt);
2385 case_list = add_case_node (case_list, CASE_LOW (elt), CASE_HIGH (elt),
2389 do_pending_stack_adjust ();
2391 /* Make sure start points to something that won't need any transformation
2392 before the end of this function. */
2393 if (!NOTE_P (get_last_insn ()))
2394 emit_note (NOTE_INSN_DELETED);
2396 start = get_last_insn ();
2398 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2399 if (index_type != error_mark_node)
2403 /* If we don't have a default-label, create one here,
2404 after the body of the switch. */
2405 if (default_label_decl == 0)
2408 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
2409 expand_label (default_label_decl);
2411 default_label = label_rtx (default_label_decl);
2413 before_case = get_last_insn ();
2415 /* Get upper and lower bounds of case values.
2416 Also convert all the case values to the index expr's data type. */
2420 for (n = case_list; n; n = n->right)
2422 /* Check low and high label values are integers. */
2423 gcc_assert (TREE_CODE (n->low) == INTEGER_CST);
2424 gcc_assert (TREE_CODE (n->high) == INTEGER_CST);
2426 n->low = convert (index_type, n->low);
2427 n->high = convert (index_type, n->high);
2429 /* Count the elements and track the largest and smallest
2430 of them (treating them as signed even if they are not). */
2438 if (INT_CST_LT (n->low, minval))
2440 if (INT_CST_LT (maxval, n->high))
2443 /* A range counts double, since it requires two compares. */
2444 if (! tree_int_cst_equal (n->low, n->high))
2447 /* Count the number of unique case node targets. */
2449 lab = label_rtx (n->code_label);
2450 for (m = case_list; m != n; m = m->right)
2451 if (label_rtx (m->code_label) == lab)
2458 /* Compute span of values. */
2460 range = fold (build2 (MINUS_EXPR, index_type, maxval, minval));
2464 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
2465 emit_jump (default_label);
2468 /* Try implementing this switch statement by a short sequence of
2469 bit-wise comparisons. However, we let the binary-tree case
2470 below handle constant index expressions. */
2471 else if (CASE_USE_BIT_TESTS
2472 && ! TREE_CONSTANT (index_expr)
2473 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2474 && compare_tree_int (range, 0) > 0
2475 && lshift_cheap_p ()
2476 && ((uniq == 1 && count >= 3)
2477 || (uniq == 2 && count >= 5)
2478 || (uniq == 3 && count >= 6)))
2480 /* Optimize the case where all the case values fit in a
2481 word without having to subtract MINVAL. In this case,
2482 we can optimize away the subtraction. */
2483 if (compare_tree_int (minval, 0) > 0
2484 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2486 minval = integer_zero_node;
2489 emit_case_bit_tests (index_type, index_expr, minval, range,
2490 case_list, default_label);
2493 /* If range of values is much bigger than number of values,
2494 make a sequence of conditional branches instead of a dispatch.
2495 If the switch-index is a constant, do it this way
2496 because we can optimize it. */
2498 else if (count < case_values_threshold ()
2499 || compare_tree_int (range,
2500 (optimize_size ? 3 : 10) * count) > 0
2501 /* RANGE may be signed, and really large ranges will show up
2502 as negative numbers. */
2503 || compare_tree_int (range, 0) < 0
2504 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2507 || TREE_CONSTANT (index_expr)
2508 /* If neither casesi or tablejump is available, we can
2509 only go this way. */
2510 || (!HAVE_casesi && !HAVE_tablejump))
2512 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2514 /* If the index is a short or char that we do not have
2515 an insn to handle comparisons directly, convert it to
2516 a full integer now, rather than letting each comparison
2517 generate the conversion. */
2519 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2520 && ! have_insn_for (COMPARE, GET_MODE (index)))
2522 enum machine_mode wider_mode;
2523 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2524 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2525 if (have_insn_for (COMPARE, wider_mode))
2527 index = convert_to_mode (wider_mode, index, unsignedp);
2532 do_pending_stack_adjust ();
2535 index = copy_to_reg (index);
2536 if (GET_CODE (index) == CONST_INT
2537 || TREE_CODE (index_expr) == INTEGER_CST)
2539 /* Make a tree node with the proper constant value
2540 if we don't already have one. */
2541 if (TREE_CODE (index_expr) != INTEGER_CST)
2544 = build_int_cst_wide (NULL_TREE, INTVAL (index),
2545 unsignedp || INTVAL (index) >= 0
2547 index_expr = convert (index_type, index_expr);
2550 /* For constant index expressions we need only
2551 issue an unconditional branch to the appropriate
2552 target code. The job of removing any unreachable
2553 code is left to the optimization phase if the
2554 "-O" option is specified. */
2555 for (n = case_list; n; n = n->right)
2556 if (! tree_int_cst_lt (index_expr, n->low)
2557 && ! tree_int_cst_lt (n->high, index_expr))
2561 emit_jump (label_rtx (n->code_label));
2563 emit_jump (default_label);
2567 /* If the index expression is not constant we generate
2568 a binary decision tree to select the appropriate
2569 target code. This is done as follows:
2571 The list of cases is rearranged into a binary tree,
2572 nearly optimal assuming equal probability for each case.
2574 The tree is transformed into RTL, eliminating
2575 redundant test conditions at the same time.
2577 If program flow could reach the end of the
2578 decision tree an unconditional jump to the
2579 default code is emitted. */
2582 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
2583 && estimate_case_costs (case_list));
2584 balance_case_nodes (&case_list, NULL);
2585 emit_case_nodes (index, case_list, default_label, index_type);
2586 emit_jump (default_label);
2591 table_label = gen_label_rtx ();
2592 if (! try_casesi (index_type, index_expr, minval, range,
2593 table_label, default_label))
2596 index_type = integer_type_node;
2598 /* Index jumptables from zero for suitable values of
2599 minval to avoid a subtraction. */
2601 && compare_tree_int (minval, 0) > 0
2602 && compare_tree_int (minval, 3) < 0)
2604 minval = integer_zero_node;
2608 ok = try_tablejump (index_type, index_expr, minval, range,
2609 table_label, default_label);
2613 /* Get table of labels to jump to, in order of case index. */
2615 ncases = tree_low_cst (range, 0) + 1;
2616 labelvec = alloca (ncases * sizeof (rtx));
2617 memset (labelvec, 0, ncases * sizeof (rtx));
2619 for (n = case_list; n; n = n->right)
2621 /* Compute the low and high bounds relative to the minimum
2622 value since that should fit in a HOST_WIDE_INT while the
2623 actual values may not. */
2625 = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2626 n->low, minval)), 1);
2627 HOST_WIDE_INT i_high
2628 = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
2629 n->high, minval)), 1);
2632 for (i = i_low; i <= i_high; i ++)
2634 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2637 /* Fill in the gaps with the default. */
2638 for (i = 0; i < ncases; i++)
2639 if (labelvec[i] == 0)
2640 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2642 /* Output the table. */
2643 emit_label (table_label);
2645 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2646 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2647 gen_rtx_LABEL_REF (Pmode, table_label),
2648 gen_rtvec_v (ncases, labelvec),
2649 const0_rtx, const0_rtx));
2651 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2652 gen_rtvec_v (ncases, labelvec)));
2654 /* If the case insn drops through the table,
2655 after the table we must jump to the default-label.
2656 Otherwise record no drop-through after the table. */
2657 #ifdef CASE_DROPS_THROUGH
2658 emit_jump (default_label);
2664 before_case = NEXT_INSN (before_case);
2665 end = get_last_insn ();
2666 fail = squeeze_notes (&before_case, &end);
2668 reorder_insns (before_case, end, start);
2674 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
/* UNSIGNEDP selects unsigned comparison semantics for
   emit_cmp_and_jump_insns.  The comparison mode is OP1's mode unless
   OP1 has VOIDmode (e.g. a bare CONST_INT), in which case OP2's mode
   is used instead.  */
2677 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
/* Both operands constant: NOTE(review) the body of this branch is not
   visible in this view -- presumably the comparison is folded at
   expand time; confirm against the full source.  */
2679 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
2685 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
2686 (GET_MODE (op1) == VOIDmode
2687 ? GET_MODE (op2) : GET_MODE (op1)),
2691 /* Not all case values are encountered equally. This function
2692 uses a heuristic to weight case labels, in cases where that
2693 looks like a reasonable thing to do.
2695 Right now, all we try to guess is text, and we establish the
2698 chars above space: 16
2707 If we find any cases in the switch that are not either -1 or in the range
2708 of valid ASCII characters, or are control characters other than those
2709 commonly used with "\", don't treat this switch scanning text.
2711 Return 1 if these nodes are suitable for cost estimation, otherwise
/* NODE heads a list of case nodes linked through their RIGHT fields.
   Per the comment above, returns nonzero only when every case value
   lies in the "text" range [-1, 127] and no value inside a case range
   hits a control character marked with a negative cost.  */
2715 estimate_case_costs (case_node_ptr node)
2717 tree min_ascii = integer_minus_one_node;
2718 tree max_ascii = convert (TREE_TYPE (node->high),
2719 build_int_cst (NULL_TREE, 127));
2723 /* If we haven't already made the cost table, make it now. Note that the
2724 lower bound of the table is -1, not zero. */
2726 if (! cost_table_initialized)
2728 cost_table_initialized = 1;
2730 for (i = 0; i < 128; i++)
2733 COST_TABLE (i) = 16;
2734 else if (ISPUNCT (i))
2736 else if (ISCNTRL (i))
2737 COST_TABLE (i) = -1;
/* Override weights for the control characters commonly written with
   "\" escapes; these stay non-negative so ranges covering them are
   still acceptable.  */
2740 COST_TABLE (' ') = 8;
2741 COST_TABLE ('\t') = 4;
2742 COST_TABLE ('\0') = 4;
2743 COST_TABLE ('\n') = 2;
2744 COST_TABLE ('\f') = 1;
2745 COST_TABLE ('\v') = 1;
2746 COST_TABLE ('\b') = 1;
2749 /* See if all the case expressions look like text. It is text if the
2750 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2751 as signed arithmetic since we don't want to ever access cost_table with a
2752 value less than -1. Also check that none of the constants in a range
2753 are strange control characters. */
2755 for (n = node; n; n = n->right)
2757 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
/* Reject any range that covers a negative-cost (control) character.  */
2760 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2761 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2762 if (COST_TABLE (i) < 0)
2766 /* All interesting values are within the range of interesting
2767 ASCII characters. */
2771 /* Take an ordered list of case nodes
2772 and transform them into a near optimal binary tree,
2773 on the assumption that any target code selection value is as
2774 likely as any other.
2776 The transformation is performed by splitting the ordered
2777 list into two equal sections plus a pivot. The parts are
2778 then attached to the pivot as left and right branches. Each
2779 branch is then transformed recursively. */
/* *HEAD is an ordered list of case nodes chained through their RIGHT
   fields.  Rewrites it in place into a near-balanced binary tree,
   filling in the LEFT, RIGHT and PARENT fields of each node; PARENT
   is recorded as the parent of the resulting subtree root.  Each half
   is then balanced recursively.  */
2782 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2795 /* Count the number of entries on branch. Also count the ranges. */
2799 if (!tree_int_cst_equal (np->low, np->high))
/* A range (low != high) contributes the cost of both end points.  */
2803 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2807 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2815 /* Split this list if it is long enough for that to help. */
2820 /* Find the place in the list that bisects the list's total cost;
2821 here I gets half the total cost. */
2826 /* Skip nodes while their cost does not reach that amount. */
2827 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2828 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2829 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2832 npp = &(*npp)->right;
2837 /* Leave this branch lopsided, but optimize left-hand
2838 side and fill in `parent' fields for right-hand side. */
2840 np->parent = parent;
2841 balance_case_nodes (&np->left, np);
/* The right-hand tail stays a linear chain; just set parents.  */
2842 for (; np->right; np = np->right)
2843 np->right->parent = np;
2847 /* If there are just three nodes, split at the middle one. */
2849 npp = &(*npp)->right;
2852 /* Find the place in the list that bisects the list's total cost,
2853 where ranges count as 2.
2854 Here I gets half the total cost. */
2855 i = (i + ranges + 1) / 2;
2858 /* Skip nodes while their cost does not reach that amount. */
2859 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2864 npp = &(*npp)->right;
2869 np->parent = parent;
2872 /* Optimize each of the two split parts. */
2873 balance_case_nodes (&np->left, np);
2874 balance_case_nodes (&np->right, np);
2878 /* Else leave this branch as one level,
2879 but fill in `parent' fields. */
2881 np->parent = parent;
2882 for (; np->right; np = np->right)
2883 np->right->parent = np;
2888 /* Search the parent sections of the case node tree
2889 to see if a test for the lower bound of NODE would be redundant.
2890 INDEX_TYPE is the type of the index expression.
2892 The instructions to generate the case decision tree are
2893 output in the same order as nodes are processed so it is
2894 known that if a parent node checks the range of the current
2895 node minus one that the current node is bounded at its lower
2896 span. Thus the test would be redundant. */
/* Returns nonzero when the lower-bound check for NODE would be
   redundant: either NODE->low is already the minimum value of
   INDEX_TYPE, or some ancestor node's HIGH equals NODE->low - 1
   (so that side has already been tested).  */
2899 node_has_low_bound (case_node_ptr node, tree index_type)
2902 case_node_ptr pnode;
2904 /* If the lower bound of this node is the lowest value in the index type,
2905 we need not test it. */
2907 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2910 /* If this node has a left branch, the value at the left must be less
2911 than that at this node, so it cannot be bounded at the bottom and
2912 we need not bother testing any further. */
2917 low_minus_one = fold (build2 (MINUS_EXPR, TREE_TYPE (node->low),
2918 node->low, integer_one_node));
2920 /* If the subtraction above overflowed, we can't verify anything.
2921 Otherwise, look for a parent that tests our value - 1. */
2923 if (! tree_int_cst_lt (low_minus_one, node->low))
/* Walk the already-tested ancestors looking for HIGH == low - 1.  */
2926 for (pnode = node->parent; pnode; pnode = pnode->parent)
2927 if (tree_int_cst_equal (low_minus_one, pnode->high))
2933 /* Search the parent sections of the case node tree
2934 to see if a test for the upper bound of NODE would be redundant.
2935 INDEX_TYPE is the type of the index expression.
2937 The instructions to generate the case decision tree are
2938 output in the same order as nodes are processed so it is
2939 known that if a parent node checks the range of the current
2940 node plus one that the current node is bounded at its upper
2941 span. Thus the test would be redundant. */
/* Returns nonzero when the upper-bound check for NODE would be
   redundant: INDEX_TYPE has no maximum value, NODE->high already
   equals that maximum, or some ancestor node's LOW equals
   NODE->high + 1 (so that side has already been tested).  */
2944 node_has_high_bound (case_node_ptr node, tree index_type)
2947 case_node_ptr pnode;
2949 /* If there is no upper bound, obviously no test is needed. */
2951 if (TYPE_MAX_VALUE (index_type) == NULL)
2954 /* If the upper bound of this node is the highest value in the type
2955 of the index expression, we need not test against it. */
2957 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2960 /* If this node has a right branch, the value at the right must be greater
2961 than that at this node, so it cannot be bounded at the top and
2962 we need not bother testing any further. */
2967 high_plus_one = fold (build2 (PLUS_EXPR, TREE_TYPE (node->high),
2968 node->high, integer_one_node));
2970 /* If the addition above overflowed, we can't verify anything.
2971 Otherwise, look for a parent that tests our value + 1. */
2973 if (! tree_int_cst_lt (node->high, high_plus_one))
/* Walk the already-tested ancestors looking for LOW == high + 1.  */
2976 for (pnode = node->parent; pnode; pnode = pnode->parent)
2977 if (tree_int_cst_equal (high_plus_one, pnode->low))
2983 /* Search the parent sections of the
2984 case node tree to see if both tests for the upper and lower
2985 bounds of NODE would be redundant. */
/* Nonzero iff both the lower- and upper-bound tests for NODE are
   redundant (see node_has_low_bound and node_has_high_bound above),
   so the decision tree can jump straight to NODE's code.  */
2988 node_is_bounded (case_node_ptr node, tree index_type)
2990 return (node_has_low_bound (node, index_type)
2991 && node_has_high_bound (node, index_type));
2994 /* Emit step-by-step code to select a case for the value of INDEX.
2995 The thus generated decision tree follows the form of the
2996 case-node binary tree NODE, whose nodes represent test conditions.
2997 INDEX_TYPE is the type of the index of the switch.
2999 Care is taken to prune redundant tests from the decision tree
3000 by detecting any boundary conditions already checked by
3001 emitted rtx. (See node_has_high_bound, node_has_low_bound
3002 and node_is_bounded, above.)
3004 Where the test conditions can be shown to be redundant we emit
3005 an unconditional jump to the target code. As a further
3006 optimization, the subordinates of a tree node are examined to
3007 check for bounded nodes. In this case conditional and/or
3008 unconditional jumps as a result of the boundary check for the
3009 current node are arranged to target the subordinates associated
3010 code for out of bound conditions on the current node.
3012 We can assume that when control reaches the code generated here,
3013 the index value has already been compared with the parents
3014 of this node, and determined to be on the same side of each parent
3015 as this node is. Thus, if this node tests for the value 51,
3016 and a parent tested for 52, we don't need to consider
3017 the possibility of a value greater than 51. If another parent
3018 tests for the value 50, then this node need not test anything. */
3021 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
3024 /* If INDEX has an unsigned type, we must make unsigned branches. */
3025 int unsignedp = TYPE_UNSIGNED (index_type);
3026 enum machine_mode mode = GET_MODE (index);
3027 enum machine_mode imode = TYPE_MODE (index_type);
3029 /* See if our parents have already tested everything for us.
3030 If they have, emit an unconditional jump for this node. */
3031 if (node_is_bounded (node, index_type))
3032 emit_jump (label_rtx (node->code_label));
3034 else if (tree_int_cst_equal (node->low, node->high))
3036 /* Node is single valued. First see if the index expression matches
3037 this node and then check our children, if any. */
3039 do_jump_if_equal (index,
3040 convert_modes (mode, imode,
3041 expand_expr (node->low, NULL_RTX,
3044 label_rtx (node->code_label), unsignedp);
3046 if (node->right != 0 && node->left != 0)
3048 /* This node has children on both sides.
3049 Dispatch to one side or the other
3050 by comparing the index value with this node's value.
3051 If one subtree is bounded, check that one first,
3052 so we can avoid real branches in the tree. */
3054 if (node_is_bounded (node->right, index_type))
3056 emit_cmp_and_jump_insns (index,
3059 expand_expr (node->high, NULL_RTX,
3062 GT, NULL_RTX, mode, unsignedp,
3063 label_rtx (node->right->code_label));
3064 emit_case_nodes (index, node->left, default_label, index_type);
3067 else if (node_is_bounded (node->left, index_type))
3069 emit_cmp_and_jump_insns (index,
3072 expand_expr (node->high, NULL_RTX,
3075 LT, NULL_RTX, mode, unsignedp,
3076 label_rtx (node->left->code_label));
3077 emit_case_nodes (index, node->right, default_label, index_type);
3080 /* If both children are single-valued cases with no
3081 children, finish up all the work. This way, we can save
3082 one ordered comparison. */
3083 else if (tree_int_cst_equal (node->right->low, node->right->high)
3084 && node->right->left == 0
3085 && node->right->right == 0
3086 && tree_int_cst_equal (node->left->low, node->left->high)
3087 && node->left->left == 0
3088 && node->left->right == 0)
3090 /* Neither node is bounded. First distinguish the two sides;
3091 then emit the code for one side at a time. */
3093 /* See if the value matches what the right hand side
3095 do_jump_if_equal (index,
3096 convert_modes (mode, imode,
3097 expand_expr (node->right->low,
3101 label_rtx (node->right->code_label),
3104 /* See if the value matches what the left hand side
3106 do_jump_if_equal (index,
3107 convert_modes (mode, imode,
3108 expand_expr (node->left->low,
3112 label_rtx (node->left->code_label),
3118 /* Neither node is bounded. First distinguish the two sides;
3119 then emit the code for one side at a time. */
3121 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3123 /* See if the value is on the right. */
3124 emit_cmp_and_jump_insns (index,
3127 expand_expr (node->high, NULL_RTX,
3130 GT, NULL_RTX, mode, unsignedp,
3131 label_rtx (test_label));
3133 /* Value must be on the left.
3134 Handle the left-hand subtree. */
3135 emit_case_nodes (index, node->left, default_label, index_type);
3136 /* If left-hand subtree does nothing,
3138 emit_jump (default_label);
3140 /* Code branches here for the right-hand subtree. */
3141 expand_label (test_label);
3142 emit_case_nodes (index, node->right, default_label, index_type);
3146 else if (node->right != 0 && node->left == 0)
3148 /* Here we have a right child but no left so we issue conditional
3149 branch to default and process the right child.
3151 Omit the conditional branch to default if we it avoid only one
3152 right child; it costs too much space to save so little time. */
3154 if (node->right->right || node->right->left
3155 || !tree_int_cst_equal (node->right->low, node->right->high))
3157 if (!node_has_low_bound (node, index_type))
3159 emit_cmp_and_jump_insns (index,
3162 expand_expr (node->high, NULL_RTX,
3165 LT, NULL_RTX, mode, unsignedp,
3169 emit_case_nodes (index, node->right, default_label, index_type);
3172 /* We cannot process node->right normally
3173 since we haven't ruled out the numbers less than
3174 this node's value. So handle node->right explicitly. */
3175 do_jump_if_equal (index,
3178 expand_expr (node->right->low, NULL_RTX,
3181 label_rtx (node->right->code_label), unsignedp);
3184 else if (node->right == 0 && node->left != 0)
3186 /* Just one subtree, on the left. */
3187 if (node->left->left || node->left->right
3188 || !tree_int_cst_equal (node->left->low, node->left->high))
3190 if (!node_has_high_bound (node, index_type))
3192 emit_cmp_and_jump_insns (index,
3195 expand_expr (node->high, NULL_RTX,
3198 GT, NULL_RTX, mode, unsignedp,
3202 emit_case_nodes (index, node->left, default_label, index_type);
3205 /* We cannot process node->left normally
3206 since we haven't ruled out the numbers less than
3207 this node's value. So handle node->left explicitly. */
3208 do_jump_if_equal (index,
3211 expand_expr (node->left->low, NULL_RTX,
3214 label_rtx (node->left->code_label), unsignedp);
3219 /* Node is a range. These cases are very similar to those for a single
3220 value, except that we do not start by testing whether this node
3221 is the one to branch to. */
3223 if (node->right != 0 && node->left != 0)
3225 /* Node has subtrees on both sides.
3226 If the right-hand subtree is bounded,
3227 test for it first, since we can go straight there.
3228 Otherwise, we need to make a branch in the control structure,
3229 then handle the two subtrees. */
3230 tree test_label = 0;
3232 if (node_is_bounded (node->right, index_type))
3233 /* Right hand node is fully bounded so we can eliminate any
3234 testing and branch directly to the target code. */
3235 emit_cmp_and_jump_insns (index,
3238 expand_expr (node->high, NULL_RTX,
3241 GT, NULL_RTX, mode, unsignedp,
3242 label_rtx (node->right->code_label));
3245 /* Right hand node requires testing.
3246 Branch to a label where we will handle it later. */
3248 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3249 emit_cmp_and_jump_insns (index,
3252 expand_expr (node->high, NULL_RTX,
3255 GT, NULL_RTX, mode, unsignedp,
3256 label_rtx (test_label));
3259 /* Value belongs to this node or to the left-hand subtree. */
3261 emit_cmp_and_jump_insns (index,
3264 expand_expr (node->low, NULL_RTX,
3267 GE, NULL_RTX, mode, unsignedp,
3268 label_rtx (node->code_label));
3270 /* Handle the left-hand subtree. */
3271 emit_case_nodes (index, node->left, default_label, index_type);
3273 /* If right node had to be handled later, do that now. */
3277 /* If the left-hand subtree fell through,
3278 don't let it fall into the right-hand subtree. */
3279 emit_jump (default_label);
3281 expand_label (test_label);
3282 emit_case_nodes (index, node->right, default_label, index_type);
3286 else if (node->right != 0 && node->left == 0)
3288 /* Deal with values to the left of this node,
3289 if they are possible. */
3290 if (!node_has_low_bound (node, index_type))
3292 emit_cmp_and_jump_insns (index,
3295 expand_expr (node->low, NULL_RTX,
3298 LT, NULL_RTX, mode, unsignedp,
3302 /* Value belongs to this node or to the right-hand subtree. */
3304 emit_cmp_and_jump_insns (index,
3307 expand_expr (node->high, NULL_RTX,
3310 LE, NULL_RTX, mode, unsignedp,
3311 label_rtx (node->code_label));
3313 emit_case_nodes (index, node->right, default_label, index_type);
3316 else if (node->right == 0 && node->left != 0)
3318 /* Deal with values to the right of this node,
3319 if they are possible. */
3320 if (!node_has_high_bound (node, index_type))
3322 emit_cmp_and_jump_insns (index,
3325 expand_expr (node->high, NULL_RTX,
3328 GT, NULL_RTX, mode, unsignedp,
3332 /* Value belongs to this node or to the left-hand subtree. */
3334 emit_cmp_and_jump_insns (index,
3337 expand_expr (node->low, NULL_RTX,
3340 GE, NULL_RTX, mode, unsignedp,
3341 label_rtx (node->code_label));
3343 emit_case_nodes (index, node->left, default_label, index_type);
3348 /* Node has no children so we check low and high bounds to remove
3349 redundant tests. Only one of the bounds can exist,
3350 since otherwise this node is bounded--a case tested already. */
3351 int high_bound = node_has_high_bound (node, index_type);
3352 int low_bound = node_has_low_bound (node, index_type);
3354 if (!high_bound && low_bound)
3356 emit_cmp_and_jump_insns (index,
3359 expand_expr (node->high, NULL_RTX,
3362 GT, NULL_RTX, mode, unsignedp,
3366 else if (!low_bound && high_bound)
3368 emit_cmp_and_jump_insns (index,
3371 expand_expr (node->low, NULL_RTX,
3374 LT, NULL_RTX, mode, unsignedp,
3377 else if (!low_bound && !high_bound)
3379 /* Widen LOW and HIGH to the same width as INDEX. */
3380 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3381 tree low = build1 (CONVERT_EXPR, type, node->low);
3382 tree high = build1 (CONVERT_EXPR, type, node->high);
3383 rtx low_rtx, new_index, new_bound;
3385 /* Instead of doing two branches, emit one unsigned branch for
3386 (index-low) > (high-low). */
3387 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
3388 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3389 NULL_RTX, unsignedp,
3391 new_bound = expand_expr (fold (build2 (MINUS_EXPR, type,
3395 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3396 mode, 1, default_label);
3399 emit_jump (label_rtx (node->code_label));