1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
29 #include "coretypes.h"
33 #include "hard-reg-set.h"
39 #include "insn-config.h"
44 #include "diagnostic-core.h"
47 #include "langhooks.h"
53 #include "alloc-pool.h"
54 #include "pretty-print.h"
58 /* Functions and data structures for expanding case statements. */
60 /* Case label structure, used to hold info on labels within case
61 statements. We handle "range" labels; for a single-value label
62 as in C, the high and low limits are the same.
64 We start with a vector of case nodes sorted in ascending order, and
65 the default label as the last element in the vector. Before expanding
66 to RTL, we transform this vector into a list linked via the RIGHT
67 fields in the case_node struct. Nodes with higher case values are
70 Switch statements can be output in three forms. A branch table is
71 used if there are more than a few labels and the labels are dense
72 within the range between the smallest and largest case value. If a
73 branch table is used, no further manipulations are done with the case
76 The alternative to the use of a branch table is to generate a series
77 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
78 and PARENT fields to hold a binary tree. Initially the tree is
79 totally unbalanced, with everything on the right. We balance the tree
80 with nodes on the left having lower case values than the parent
81 and nodes on the right having higher values. We then output the tree
84 For very small, suitable switch statements, we can generate a series
85 of simple bit test and branches instead. */
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
97 typedef struct case_node case_node;
98 typedef struct case_node *case_node_ptr;
100 /* These are used by estimate_case_costs and balance_case_nodes. */
102 /* This must be a signed type, and non-ANSI compilers lack signed char. */
103 static short cost_table_[129];
104 static int use_cost_table;
105 static int cost_table_initialized;
107 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
109 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
111 static int n_occurrences (int, const char *);
112 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
113 static void expand_nl_goto_receiver (void);
114 static bool check_operand_nalternatives (tree, tree);
115 static bool check_unique_operand_names (tree, tree, tree);
116 static char *resolve_operand_name_1 (char *, tree, tree, tree);
117 static void expand_null_return_1 (void);
118 static void expand_value_return (rtx);
119 static int estimate_case_costs (case_node_ptr);
120 static bool lshift_cheap_p (void);
121 static int case_bit_test_cmp (const void *, const void *);
122 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
123 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
124 static int node_has_low_bound (case_node_ptr, tree);
125 static int node_has_high_bound (case_node_ptr, tree);
126 static int node_is_bounded (case_node_ptr, tree);
127 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
128 static struct case_node *add_case_node (struct case_node *, tree,
129 tree, tree, tree, alloc_pool);
132 /* Return the rtx-label that corresponds to a LABEL_DECL,
133 creating it if necessary. */
/* Return the rtx code_label for LABEL (a LABEL_DECL), creating it on
   first use and caching it in DECL_RTL.
   NOTE(review): the return-type line and braces of this definition are
   not visible in this capture of the file.  */
136 label_rtx (tree label)
138 gcc_assert (TREE_CODE (label) == LABEL_DECL);
140 if (!DECL_RTL_SET_P (label))
142 rtx r = gen_label_rtx ();
143 SET_DECL_RTL (label, r);
144 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
/* Forced and nonlocal labels must survive optimization even if they
   appear unreferenced, so mark the new rtx preserved.  */
145 LABEL_PRESERVE_P (r) = 1;
148 return DECL_RTL (label);
151 /* As above, but also put it on the forced-reference list of the
152 function that contains it. */
/* Like label_rtx, but additionally record the label on the
   forced_labels list so it is never deleted.  Asserts that LABEL
   belongs to some function context.  */
154 force_label_rtx (tree label)
156 rtx ref = label_rtx (label);
157 tree function = decl_function_context (label);
159 gcc_assert (function);
/* Chain the reference onto the global forced_labels EXPR_LIST.  */
161 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
165 /* Add an unconditional jump to LABEL as the next sequential instruction. */
/* Emit an unconditional jump to rtx LABEL, flushing any pending stack
   adjustment first so the stack is consistent at the jump target.  */
168 emit_jump (rtx label)
170 do_pending_stack_adjust ();
171 emit_jump_insn (gen_jump (label));
175 /* Emit code to jump to the address
176 specified by the pointer expression EXP. */
/* Expand a computed goto: jump to the address computed by pointer
   expression EXP.  The value is converted to Pmode before the
   indirect jump is emitted.  */
179 expand_computed_goto (tree exp)
181 rtx x = expand_normal (exp);
183 x = convert_memory_address (Pmode, x);
/* Flush pending stack adjustments: control flow leaves this point.  */
185 do_pending_stack_adjust ();
186 emit_indirect_jump (x);
189 /* Handle goto statements and the labels that they can go to. */
191 /* Specify the location in the RTL code of a label LABEL,
192 which is a LABEL_DECL tree node.
194 This is used for the kind of label that the user can jump to with a
195 goto statement, and for alternatives of a switch or case statement.
196 RTL labels generated for loops and conditionals don't go through here;
197 they are generated directly at the RTL level, by other functions below.
199 Note that this has nothing to do with defining label *names*.
200 Languages vary in how they do that and what that even means. */
/* Emit the RTL label for LABEL (a user-visible LABEL_DECL) at the
   current position, and register nonlocal/forced labels on the
   appropriate global lists.  */
203 expand_label (tree label)
205 rtx label_r = label_rtx (label);
207 do_pending_stack_adjust ();
208 emit_label (label_r);
209 if (DECL_NAME (label))
210 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
212 if (DECL_NONLOCAL (label))
/* A nonlocal goto target needs receiver code to restore state, and
   must be recorded on nonlocal_goto_handler_labels.  */
214 expand_nl_goto_receiver ();
215 nonlocal_goto_handler_labels
216 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
217 nonlocal_goto_handler_labels);
220 if (FORCED_LABEL (label))
221 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
223 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
224 maybe_set_first_label_num (label_r);
227 /* Generate RTL code for a `goto' statement with target label LABEL.
228 LABEL should be a LABEL_DECL tree node that was or will later be
229 defined with `expand_label'. */
/* Expand a `goto' to LABEL.  Nonlocal gotos to a containing function
   should already have been lowered to __builtin_nonlocal_goto; under
   ENABLE_CHECKING we assert that here.  */
232 expand_goto (tree label)
234 #ifdef ENABLE_CHECKING
235 /* Check for a nonlocal goto to a containing function. Should have
236 gotten translated to __builtin_nonlocal_goto. */
237 tree context = decl_function_context (label);
238 gcc_assert (!context || context == current_function_decl);
241 emit_jump (label_rtx (label));
244 /* Return the number of times character C occurs in string S. */
/* NOTE(review): only the signature of this helper is visible in this
   capture; its body is not shown here.  */
246 n_occurrences (int c, const char *s)
254 /* Generate RTL for an asm statement (explicit assembler code).
255 STRING is a STRING_CST node containing the assembler code text,
256 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
257 insn is volatile; don't optimize it. */
/* Expand a basic (no-operand) asm statement.  STRING is the STRING_CST
   holding the assembler text (possibly wrapped in an ADDR_EXPR); VOL
   nonzero marks the resulting insn volatile so it is not optimized
   away.  LOCUS is the source location.  */
260 expand_asm_loc (tree string, int vol, location_t locus)
264 if (TREE_CODE (string) == ADDR_EXPR)
265 string = TREE_OPERAND (string, 0);
/* Build an ASM_INPUT rtx carrying a GC-managed copy of the text.  */
267 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
268 ggc_strdup (TREE_STRING_POINTER (string)),
271 MEM_VOLATILE_P (body) = vol;
276 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
277 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
278 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
279 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
280 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
281 constraint allows the use of a register operand. And, *IS_INOUT
282 will be true if the operand is read-write, i.e., if it is used as
283 an input as well as an output. If *CONSTRAINT_P is not in
284 canonical form, it will be made canonical. (Note that `+' will be
285 replaced with `=' as part of this process.)
287 Returns TRUE if all went well; FALSE if an error occurred. */
/* Parse and canonicalize one output constraint (see the block comment
   above for the full contract).  On success *ALLOWS_MEM / *ALLOWS_REG /
   *IS_INOUT are set and *CONSTRAINT_P may be replaced by a canonical
   copy beginning with `='.  Returns false after diagnosing an error.
   NOTE(review): several lines of this function (braces, some
   statements) are not visible in this capture.  */
290 parse_output_constraint (const char **constraint_p, int operand_num,
291 int ninputs, int noutputs, bool *allows_mem,
292 bool *allows_reg, bool *is_inout)
294 const char *constraint = *constraint_p;
297 /* Assume the constraint doesn't allow the use of either a register
302 /* Allow the `=' or `+' to not be at the beginning of the string,
303 since it wasn't explicitly documented that way, and there is a
304 large body of code that puts it last. Swap the character to
305 the front, so as not to uglify any place else. */
306 p = strchr (constraint, '=');
308 p = strchr (constraint, '+');
310 /* If the string doesn't contain an `=', issue an error
314 error ("output operand constraint lacks %<=%>");
318 /* If the constraint begins with `+', then the operand is both read
319 from and written to. */
320 *is_inout = (*p == '+');
322 /* Canonicalize the output constraint so that it begins with `='. */
323 if (p != constraint || *is_inout)
326 size_t c_len = strlen (constraint);
329 warning (0, "output constraint %qc for operand %d "
330 "is not at the beginning",
333 /* Make a copy of the constraint. */
334 buf = XALLOCAVEC (char, c_len + 1);
335 strcpy (buf, constraint);
336 /* Swap the first character and the `=' or `+'. */
337 buf[p - constraint] = buf[0];
338 /* Make sure the first character is an `='. (Until we do this,
339 it might be a `+'.) */
341 /* Replace the constraint with the canonicalized string. */
342 *constraint_p = ggc_alloc_string (buf, c_len);
343 constraint = *constraint_p;
346 /* Loop through the constraint string. */
347 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
352 error ("operand constraint contains incorrectly positioned "
357 if (operand_num + 1 == ninputs + noutputs)
359 error ("%<%%%> constraint used with last operand");
364 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
368 case '?': case '!': case '*': case '&': case '#':
369 case 'E': case 'F': case 'G': case 'H':
370 case 's': case 'i': case 'n':
371 case 'I': case 'J': case 'K': case 'L': case 'M':
372 case 'N': case 'O': case 'P': case ',':
375 case '0': case '1': case '2': case '3': case '4':
376 case '5': case '6': case '7': case '8': case '9':
/* Matching (numeric) constraints only make sense on inputs.  */
378 error ("matching constraint not valid in output operand");
382 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
383 excepting those that expand_call created. So match memory
400 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
402 #ifdef EXTRA_CONSTRAINT_STR
403 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
405 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
409 /* Otherwise we can't assume anything about the nature of
410 the constraint except that it isn't purely registers.
411 Treat it like "g" and hope for the best. */
422 /* Similar, but for input constraints. */
/* Parse one input constraint; the input-side counterpart of
   parse_output_constraint.  Sets *ALLOWS_MEM / *ALLOWS_REG, and for a
   matching (numeric) constraint may replace *CONSTRAINT_P with the
   matched output's constraint.  Returns false after diagnosing an
   error.
   NOTE(review): several lines of this function are not visible in
   this capture.  */
425 parse_input_constraint (const char **constraint_p, int input_num,
426 int ninputs, int noutputs, int ninout,
427 const char * const * constraints,
428 bool *allows_mem, bool *allows_reg)
430 const char *constraint = *constraint_p;
431 const char *orig_constraint = constraint;
432 size_t c_len = strlen (constraint);
434 bool saw_match = false;
436 /* Assume the constraint doesn't allow the use of either
437 a register or memory. */
441 /* Make sure constraint has neither `=', `+', nor '&'. */
443 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
444 switch (constraint[j])
446 case '+': case '=': case '&':
/* Output-only modifiers are diagnosed only on the original string,
   not on a constraint substituted from a matched output.  */
447 if (constraint == orig_constraint)
449 error ("input operand constraint contains %qc", constraint[j]);
455 if (constraint == orig_constraint
456 && input_num + 1 == ninputs - ninout)
458 error ("%<%%%> constraint used with last operand");
463 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
468 case '?': case '!': case '*': case '#':
469 case 'E': case 'F': case 'G': case 'H':
470 case 's': case 'i': case 'n':
471 case 'I': case 'J': case 'K': case 'L': case 'M':
472 case 'N': case 'O': case 'P': case ',':
475 /* Whether or not a numeric constraint allows a register is
476 decided by the matching constraint, and so there is no need
477 to do anything special with them. We must handle them in
478 the default case, so that we don't unnecessarily force
479 operands to memory. */
480 case '0': case '1': case '2': case '3': case '4':
481 case '5': case '6': case '7': case '8': case '9':
488 match = strtoul (constraint + j, &end, 10);
489 if (match >= (unsigned long) noutputs)
491 error ("matching constraint references invalid operand number");
495 /* Try and find the real constraint for this dup. Only do this
496 if the matching constraint is the only alternative. */
498 && (j == 0 || (j == 1 && constraint[0] == '%')))
500 constraint = constraints[match];
501 *constraint_p = constraint;
502 c_len = strlen (constraint);
504 /* ??? At the end of the loop, we will skip the first part of
505 the matched constraint. This assumes not only that the
506 other constraint is an output constraint, but also that
507 the '=' or '+' come first. */
511 j = end - constraint;
512 /* Anticipate increment at end of loop. */
527 if (! ISALPHA (constraint[j]))
529 error ("invalid punctuation %qc in constraint", constraint[j]);
532 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
535 #ifdef EXTRA_CONSTRAINT_STR
536 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
538 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
542 /* Otherwise we can't assume anything about the nature of
543 the constraint except that it isn't purely registers.
544 Treat it like "g" and hope for the best. */
552 if (saw_match && !*allows_reg)
553 warning (0, "matching constraint does not allow a register");
558 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
559 can be an asm-declared register. Called via walk_tree. */
/* walk_tree callback: return DECL if *DECLP is an asm-declared
   hard-register variable whose register overlaps the HARD_REG_SET
   passed via DATA.  Types and PARM_DECLs stop the walk into
   subtrees.  */
562 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
566 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
568 if (TREE_CODE (decl) == VAR_DECL)
/* Only user-declared hard-register variables can conflict.  */
570 if (DECL_HARD_REGISTER (decl)
571 && REG_P (DECL_RTL (decl))
572 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
574 rtx reg = DECL_RTL (decl);
576 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
581 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
586 /* If there is an overlap between *REGS and DECL, return the first overlap
/* If there is an overlap between *REGS and DECL, return the first
   overlapping VAR_DECL found by walking DECL; otherwise NULL_TREE.  */
589 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
591 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
594 /* Check for overlap between registers marked in CLOBBERED_REGS and
595 anything inappropriate in T. Emit error and return the register
596 variable definition for error, NULL_TREE for ok. */
/* Diagnose a conflict between an asm-declared register variable in T
   and the registers in CLOBBERED_REGS.  Emits an error on conflict;
   also clears DECL_REGISTER on the offending decl so the same
   variable is not reported repeatedly.  */
599 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
601 /* Conflicts between asm-declared register variables and the clobber
602 list are not allowed. */
603 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
607 error ("asm-specifier for variable %qE conflicts with asm clobber list",
608 DECL_NAME (overlap));
610 /* Reset registerness to stop multiple errors emitted for a single
612 DECL_REGISTER (overlap) = 0;
619 /* Generate RTL for an asm statement with arguments.
620 STRING is the instruction template.
621 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
622 Each output or input has an expression in the TREE_VALUE and
623 a tree list in TREE_PURPOSE which in turn contains a constraint
624 name in TREE_VALUE (or NULL_TREE) and a constraint string
626 CLOBBERS is a list of STRING_CST nodes each naming a hard register
627 that is clobbered by this insn.
629 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
630 Some elements of OUTPUTS may be replaced with trees representing temporary
631 values. The caller should copy those temporary values to the originally
634 VOL nonzero means the insn is volatile; don't optimize it. */
/* Expand an extended asm with operands into RTL.  See the block
   comment above for the meaning of STRING, OUTPUTS, INPUTS, CLOBBERS,
   LABELS, VOL and LOCUS.  Structure: collect constraints; validate
   clobbers; two passes over outputs (validity, then evaluation);
   evaluate inputs; build the ASM_OPERANDS body (possibly wrapped in a
   PARALLEL with SETs and CLOBBERs); finally copy reloaded outputs
   back.
   NOTE(review): many physical lines of this large function (braces,
   else-arms, some statements) are not visible in this capture; the
   comments below describe only what the visible lines establish.  */
637 expand_asm_operands (tree string, tree outputs, tree inputs,
638 tree clobbers, tree labels, int vol, location_t locus)
640 rtvec argvec, constraintvec, labelvec;
642 int ninputs = list_length (inputs);
643 int noutputs = list_length (outputs);
644 int nlabels = list_length (labels);
647 HARD_REG_SET clobbered_regs;
648 int clobber_conflict_found = 0;
652 /* Vector of RTX's of evaluated output operands. */
653 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
654 int *inout_opnum = XALLOCAVEC (int, noutputs);
655 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
656 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
657 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
658 int old_generating_concat_p = generating_concat_p;
660 /* An ASM with no outputs needs to be treated as volatile, for now. */
664 if (! check_operand_nalternatives (outputs, inputs))
667 string = resolve_asm_operand_names (string, outputs, inputs, labels);
669 /* Collect constraints.  Outputs first, then inputs, into one array. */
671 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
672 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
673 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
674 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
676 /* Sometimes we wish to automatically clobber registers across an asm.
677 Case in point is when the i386 backend moved from cc0 to a hard reg --
678 maintaining source-level compatibility means automatically clobbering
679 the flags register. */
680 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
682 /* Count the number of meaningful clobbered registers, ignoring what
683 we would ignore later. */
685 CLEAR_HARD_REG_SET (clobbered_regs);
686 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
691 if (TREE_VALUE (tail) == error_mark_node)
693 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
695 i = decode_reg_name_and_count (regname, &nregs);
699 error ("unknown register name %qs in %<asm%>", regname);
701 /* Mark clobbered registers. */
706 for (reg = i; reg < i + nregs; reg++)
710 /* Clobbering the PIC register is an error. */
711 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
713 error ("PIC register clobbered by %qs in %<asm%>", regname);
717 SET_HARD_REG_BIT (clobbered_regs, reg);
722 /* First pass over inputs and outputs checks validity and sets
723 mark_addressable if needed. */
726 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
728 tree val = TREE_VALUE (tail);
729 tree type = TREE_TYPE (val);
730 const char *constraint;
735 /* If there's an erroneous arg, emit no insn. */
736 if (type == error_mark_node)
739 /* Try to parse the output constraint. If that fails, there's
740 no point in going further. */
741 constraint = constraints[i];
742 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
743 &allows_mem, &allows_reg, &is_inout))
750 && REG_P (DECL_RTL (val))
751 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
752 mark_addressable (val);
759 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
761 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
765 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
767 bool allows_reg, allows_mem;
768 const char *constraint;
770 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
771 would get VOIDmode and that could cause a crash in reload. */
772 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
775 constraint = constraints[i + noutputs];
776 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
777 constraints, &allows_mem, &allows_reg))
780 if (! allows_reg && allows_mem)
781 mark_addressable (TREE_VALUE (tail));
784 /* Second pass evaluates arguments. */
786 /* Make sure stack is consistent for asm goto. */
788 do_pending_stack_adjust ();
791 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
793 tree val = TREE_VALUE (tail);
794 tree type = TREE_TYPE (val);
/* Re-parse: the first pass may have canonicalized constraints[i].  */
801 ok = parse_output_constraint (&constraints[i], i, ninputs,
802 noutputs, &allows_mem, &allows_reg,
806 /* If an output operand is not a decl or indirect ref and our constraint
807 allows a register, make a temporary to act as an intermediate.
808 Make the asm insn write into that, then our caller will copy it to
809 the real output operand. Likewise for promoted variables. */
811 generating_concat_p = 0;
813 real_output_rtx[i] = NULL_RTX;
814 if ((TREE_CODE (val) == INDIRECT_REF
817 && (allows_mem || REG_P (DECL_RTL (val)))
818 && ! (REG_P (DECL_RTL (val))
819 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
823 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
825 op = validize_mem (op);
827 if (! allows_reg && !MEM_P (op))
828 error ("output number %d not directly addressable", i);
829 if ((! allows_mem && MEM_P (op))
830 || GET_CODE (op) == CONCAT)
/* Spill through a pseudo; the original location is restored from
   real_output_rtx[] after the asm is emitted.  */
832 real_output_rtx[i] = op;
833 op = gen_reg_rtx (GET_MODE (op));
835 emit_move_insn (op, real_output_rtx[i]);
840 op = assign_temp (type, 0, 0, 1);
841 op = validize_mem (op);
842 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
843 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
844 TREE_VALUE (tail) = make_tree (type, op);
848 generating_concat_p = old_generating_concat_p;
/* Record in-out ("+") operands so a matching input is synthesized.  */
852 inout_mode[ninout] = TYPE_MODE (type);
853 inout_opnum[ninout++] = i;
856 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
857 clobber_conflict_found = 1;
860 /* Make vectors for the expression-rtx, constraint strings,
861 and named operands. */
863 argvec = rtvec_alloc (ninputs);
864 constraintvec = rtvec_alloc (ninputs);
865 labelvec = rtvec_alloc (nlabels);
867 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
868 : GET_MODE (output_rtx[0])),
869 ggc_strdup (TREE_STRING_POINTER (string)),
870 empty_string, 0, argvec, constraintvec,
873 MEM_VOLATILE_P (body) = vol;
875 /* Eval the inputs and put them into ARGVEC.
876 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
878 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
880 bool allows_reg, allows_mem;
881 const char *constraint;
886 constraint = constraints[i + noutputs];
887 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
888 constraints, &allows_mem, &allows_reg);
891 generating_concat_p = 0;
893 val = TREE_VALUE (tail);
894 type = TREE_TYPE (val);
895 /* EXPAND_INITIALIZER will not generate code for valid initializer
896 constants, but will still generate code for other types of operand.
897 This is the behavior we want for constant constraints. */
898 op = expand_expr (val, NULL_RTX, VOIDmode,
899 allows_reg ? EXPAND_NORMAL
900 : allows_mem ? EXPAND_MEMORY
901 : EXPAND_INITIALIZER);
903 /* Never pass a CONCAT to an ASM. */
904 if (GET_CODE (op) == CONCAT)
905 op = force_reg (GET_MODE (op), op);
907 op = validize_mem (op);
909 if (asm_operand_ok (op, constraint, NULL) <= 0)
911 if (allows_reg && TYPE_MODE (type) != BLKmode)
912 op = force_reg (TYPE_MODE (type), op);
913 else if (!allows_mem)
914 warning (0, "asm operand %d probably doesn%'t match constraints",
918 /* We won't recognize either volatile memory or memory
919 with a queued address as available a memory_operand
920 at this point. Ignore it: clearly this *is* a memory. */
924 warning (0, "use of memory input without lvalue in "
925 "asm operand %d is deprecated", i + noutputs);
929 rtx mem = force_const_mem (TYPE_MODE (type), op);
931 op = validize_mem (mem);
933 op = force_reg (TYPE_MODE (type), op);
936 || GET_CODE (op) == SUBREG
937 || GET_CODE (op) == CONCAT)
939 tree qual_type = build_qualified_type (type,
942 rtx memloc = assign_temp (qual_type, 1, 1, 1);
943 memloc = validize_mem (memloc);
944 emit_move_insn (memloc, op);
950 generating_concat_p = old_generating_concat_p;
951 ASM_OPERANDS_INPUT (body, i) = op;
953 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
954 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
955 ggc_strdup (constraints[i + noutputs]));
957 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
958 clobber_conflict_found = 1;
961 /* Protect all the operands from the queue now that they have all been
964 generating_concat_p = 0;
966 /* For in-out operands, copy output rtx to input rtx. */
967 for (i = 0; i < ninout; i++)
969 int j = inout_opnum[i];
972 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
/* The synthesized input gets a matching constraint naming operand J. */
975 sprintf (buffer, "%d", j);
976 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
977 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
980 /* Copy labels to the vector. */
981 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
982 ASM_OPERANDS_LABEL (body, i)
983 = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
985 generating_concat_p = old_generating_concat_p;
987 /* Now, for each output, construct an rtx
988 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
989 ARGVEC CONSTRAINTS OPNAMES))
990 If there is more than one, put them inside a PARALLEL. */
992 if (nlabels > 0 && nclobbers == 0)
/* asm goto with no clobbers: the body itself is a jump insn.  */
994 gcc_assert (noutputs == 0);
995 emit_jump_insn (body);
997 else if (noutputs == 0 && nclobbers == 0)
999 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1002 else if (noutputs == 1 && nclobbers == 0)
1004 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
1005 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
/* General case: PARALLEL of SETs plus CLOBBERs.  */
1015 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1017 /* For each output operand, store a SET. */
1018 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1020 XVECEXP (body, 0, i)
1021 = gen_rtx_SET (VOIDmode,
1023 gen_rtx_ASM_OPERANDS
1024 (GET_MODE (output_rtx[i]),
1025 ggc_strdup (TREE_STRING_POINTER (string)),
1026 ggc_strdup (constraints[i]),
1027 i, argvec, constraintvec, labelvec, locus));
1029 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1032 /* If there are no outputs (but there are some clobbers)
1033 store the bare ASM_OPERANDS into the PARALLEL. */
1036 XVECEXP (body, 0, i++) = obody;
1038 /* Store (clobber REG) for each clobbered register specified. */
1040 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1042 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1044 int j = decode_reg_name_and_count (regname, &nregs);
1049 if (j == -3) /* `cc', which is not a register */
1052 if (j == -4) /* `memory', don't cache memory across asm */
1054 XVECEXP (body, 0, i++)
1055 = gen_rtx_CLOBBER (VOIDmode,
1058 gen_rtx_SCRATCH (VOIDmode)));
1062 /* Ignore unknown register, error already signaled. */
1066 for (reg = j; reg < j + nregs; reg++)
1068 /* Use QImode since that's guaranteed to clobber just
1070 clobbered_reg = gen_rtx_REG (QImode, reg);
1072 /* Do sanity check for overlap between clobbers and
1073 respectively input and outputs that hasn't been
1074 handled. Such overlap should have been detected and
1076 if (!clobber_conflict_found)
1080 /* We test the old body (obody) contents to avoid
1081 tripping over the under-construction body. */
1082 for (opno = 0; opno < noutputs; opno++)
1083 if (reg_overlap_mentioned_p (clobbered_reg,
1086 ("asm clobber conflict with output operand");
1088 for (opno = 0; opno < ninputs - ninout; opno++)
1089 if (reg_overlap_mentioned_p (clobbered_reg,
1090 ASM_OPERANDS_INPUT (obody,
1093 ("asm clobber conflict with input operand");
1096 XVECEXP (body, 0, i++)
1097 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1102 emit_jump_insn (body);
1107 /* For any outputs that needed reloading into registers, spill them
1108 back to where they belong. */
1109 for (i = 0; i < noutputs; ++i)
1110 if (real_output_rtx[i])
1111 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1113 crtl->has_asm_statement = 1;
/* Expand a GIMPLE_ASM statement STMT to RTL: convert its operand
   vectors into TREE_CHAINed lists, dispatch basic asms to
   expand_asm_loc and extended asms to expand_asm_operands, then copy
   any intermediate outputs back to the real output destinations.
   NOTE(review): some lines of this function are not visible in this
   capture.  */
1118 expand_asm_stmt (gimple stmt)
1121 tree outputs, tail, t;
1125 tree str, out, in, cl, labels;
1126 location_t locus = gimple_location (stmt);
1128 /* Meh... convert the gimple asm operands into real tree lists.
1129 Eventually we should make all routines work on the vectors instead
1130 of relying on TREE_CHAIN. */
1132 n = gimple_asm_noutputs (stmt);
1135 t = out = gimple_asm_output_op (stmt, 0);
1136 for (i = 1; i < n; i++)
1137 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1141 n = gimple_asm_ninputs (stmt);
1144 t = in = gimple_asm_input_op (stmt, 0);
1145 for (i = 1; i < n; i++)
1146 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1150 n = gimple_asm_nclobbers (stmt);
1153 t = cl = gimple_asm_clobber_op (stmt, 0);
1154 for (i = 1; i < n; i++)
1155 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1159 n = gimple_asm_nlabels (stmt);
1162 t = labels = gimple_asm_label_op (stmt, 0);
1163 for (i = 1; i < n; i++)
1164 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1167 s = gimple_asm_string (stmt);
1168 str = build_string (strlen (s), s);
/* A "basic" asm (no operands) takes the simple path.  */
1170 if (gimple_asm_input_p (stmt))
1172 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1177 noutputs = gimple_asm_noutputs (stmt);
1178 /* o[I] is the place that output number I should be written. */
1179 o = (tree *) alloca (noutputs * sizeof (tree));
1181 /* Record the contents of OUTPUTS before it is modified. */
1182 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1183 o[i] = TREE_VALUE (tail);
1185 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1186 OUTPUTS some trees for where the values were actually stored. */
1187 expand_asm_operands (str, outputs, in, cl, labels,
1188 gimple_asm_volatile_p (stmt), locus);
1190 /* Copy all the intermediate outputs into the specified outputs. */
1191 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1193 if (o[i] != TREE_VALUE (tail))
1195 expand_assignment (o[i], TREE_VALUE (tail), false);
1198 /* Restore the original value so that it's correct the next
1199 time we expand this function. */
1200 TREE_VALUE (tail) = o[i];
1205 /* A subroutine of expand_asm_operands. Check that all operands have
1206 the same number of alternatives. Return true if so. */
/* Verify that every output and input constraint has the same number
   of alternatives (comma-separated), and that the count does not
   exceed MAX_RECOG_ALTERNATIVES.  Returns true if so, false after
   diagnosing an error.
   NOTE(review): some lines of this function are not visible in this
   capture.  */
1209 check_operand_nalternatives (tree outputs, tree inputs)
1211 if (outputs || inputs)
/* The first operand's constraint fixes the expected count.  */
1213 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1215 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1218 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1220 error ("too many alternatives in %<asm%>");
1227 const char *constraint
1228 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1230 if (n_occurrences (',', constraint) != nalternatives)
1232 error ("operand constraints for %<asm%> differ "
1233 "in number of alternatives");
/* Walk outputs first, then continue into the inputs list.  */
1237 if (TREE_CHAIN (tmp))
1238 tmp = TREE_CHAIN (tmp);
1240 tmp = next, next = 0;
1247 /* A subroutine of expand_asm_operands. Check that all operand names
1248 are unique. Return true if so. We rely on the fact that these names
1249 are identifiers, and so have been canonicalized by get_identifier,
1250 so all we need are pointer comparisons. */
/* Verify that all operand names among OUTPUTS, INPUTS and LABELS are
   unique.  Names are identifiers canonicalized by get_identifier, so
   the comparisons here suffice.  Diagnoses a duplicate and (per the
   comment above) returns whether all names were unique.
   NOTE(review): some lines, including the failure exit, are not
   visible in this capture.  */
1253 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1255 tree i, j, i_name = NULL_TREE;
1257 for (i = outputs; i ; i = TREE_CHAIN (i))
1259 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
/* Compare each output name against the later outputs only.  */
1263 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1264 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1268 for (i = inputs; i ; i = TREE_CHAIN (i))
1270 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
/* Inputs must not collide with later inputs nor with any output.  */
1274 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1275 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1277 for (j = outputs; j ; j = TREE_CHAIN (j))
1278 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1282 for (i = labels; i ; i = TREE_CHAIN (i))
1284 i_name = TREE_PURPOSE (i);
/* Labels must not collide with later labels nor with any input.  */
1288 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1289 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1291 for (j = inputs; j ; j = TREE_CHAIN (j))
1292 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1299 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
1303 /* A subroutine of expand_asm_operands.  Resolve the names of the operands
1304    in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1305    STRING and in the constraints to those numbers.  */
1308 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1315   check_unique_operand_names (outputs, inputs, labels);
1317   /* Substitute [<name>] in input constraint strings.  There should be no
1318      named operands in output constraints.  */
1319   for (t = inputs; t ; t = TREE_CHAIN (t))
1321       c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1322       if (strchr (c, '[') != NULL)
/* Work on a mutable copy; resolve_operand_name_1 rewrites the buffer
   in place and returns a pointer past the substitution.  */
1324 	  p = buffer = xstrdup (c);
1325 	  while ((p = strchr (p, '[')) != NULL)
1326 	    p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1327 	  TREE_VALUE (TREE_PURPOSE (t))
1328 	    = build_string (strlen (buffer), buffer);
1333   /* Now check for any needed substitutions in the template.  */
1334   c = TREE_STRING_POINTER (string);
1335   while ((c = strchr (c, '%')) != NULL)
/* Named references in the template look like "%[name]" or "%c[name]"
   for some operand modifier letter c.  "%%" is a literal percent and
   is stepped over.  */
1339       else if (ISALPHA (c[1]) && c[2] == '[')
1343 	c += 1 + (c[1] == '%');
1350       /* OK, we need to make a copy so we can perform the substitutions.
1351 	 Assume that we will not need extra space--we get to remove '['
1352 	 and ']', which means we cannot have a problem until we have more
1353 	 than 999 operands.  */
1354       buffer = xstrdup (TREE_STRING_POINTER (string));
1355       p = buffer + (c - TREE_STRING_POINTER (string));
/* Re-scan the copy from the first name reference found above, using
   the same %-recognition logic as the scan loop.  */
1357       while ((p = strchr (p, '%')) != NULL)
1361 	  else if (ISALPHA (p[1]) && p[2] == '[')
1365 	    p += 1 + (p[1] == '%');
1369 	  p = resolve_operand_name_1 (p, outputs, inputs, labels);
1372       string = build_string (strlen (buffer), buffer);
1379 /* A subroutine of resolve_operand_names.  P points to the '[' for a
1380    potential named operand of the form [<name>].  In place, replace
1381    the name and brackets with a number.  Return a pointer to the
1382    balance of the string after substitution.  */
1385 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1391   /* Collect the operand name.  */
1392   q = strchr (++p, ']');
1395       error ("missing close brace for named operand");
1396       return strchr (p, '\0');
1400   /* Resolve the name to a number.  */
/* Operand numbers are assigned in declaration order: all outputs
   first, then inputs, then labels -- OP keeps counting across the
   three loops.  */
1401   for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1403       tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1404       if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1407   for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1409       tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1410       if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1413   for (t = labels; t ; t = TREE_CHAIN (t), op++)
1415       tree name = TREE_PURPOSE (t);
1416       if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1420   error ("undefined named operand %qs", identifier_to_locale (p));
1424   /* Replace the name with the number.  Unfortunately, not all libraries
1425      get the return value of sprintf correct, so search for the end of the
1426      generated string by hand.  */
/* --p backs up onto the '[' so the digits overwrite the brackets.  */
1427   sprintf (--p, "%d", op);
1428   p = strchr (p, '\0');
1430   /* Verify the no extra buffer space assumption.  */
1431   gcc_assert (p <= q);
1433   /* Shift the rest of the buffer down to fill the gap.  */
1434   memmove (p, q + 1, strlen (q + 1) + 1);
1439 /* Generate RTL to evaluate the expression EXP.  */
1442 expand_expr_stmt (tree exp)
1447   value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1448   type = TREE_TYPE (exp);
1450   /* If all we do is reference a volatile value in memory,
1451      copy it to a register to be sure it is actually touched.  */
1452   if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
/* VOIDmode: nothing to load.  Non-BLK scalar: a plain register copy
   performs the access.  BLKmode (the else branch below): there is no
   single register wide enough, so force a reference another way.  */
1454       if (TYPE_MODE (type) == VOIDmode)
1456       else if (TYPE_MODE (type) != BLKmode)
1457 	value = copy_to_reg (value);
1460 	  rtx lab = gen_label_rtx ();
1462 	  /* Compare the value with itself to reference it.  */
1463 	  emit_cmp_and_jump_insns (value, value, EQ,
1464 				   expand_normal (TYPE_SIZE (type)),
1470   /* Free any temporaries used to evaluate this expression.  */
1474 /* Warn if EXP contains any computations whose results are not used.
1475    Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
1476    (potential) location of the expression.  */
1479 warn_if_unused_value (const_tree exp, location_t locus)
/* Already-used or explicitly warning-suppressed trees never warn.  */
1482   if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1485   /* Don't warn about void constructs.  This includes casting to void,
1486      void function calls, and statement expressions with a final cast
1488   if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location when it has one.  */
1491   if (EXPR_HAS_LOCATION (exp))
1492     locus = EXPR_LOCATION (exp);
1494   switch (TREE_CODE (exp))
/* Increments/decrements and the codes grouped with them have side
   effects of their own, so their value being unused is fine.  */
1496     case PREINCREMENT_EXPR:
1497     case POSTINCREMENT_EXPR:
1498     case PREDECREMENT_EXPR:
1499     case POSTDECREMENT_EXPR:
1504     case TRY_CATCH_EXPR:
1505     case WITH_CLEANUP_EXPR:
1511       /* For a binding, warn if no side effect within it.  */
/* Recurse (via tail-assignment to EXP) into the body/operand for the
   wrapper codes below.  */
1512       exp = BIND_EXPR_BODY (exp);
1516     case NON_LVALUE_EXPR:
1517       exp = TREE_OPERAND (exp, 0);
1520     case TRUTH_ORIF_EXPR:
1521     case TRUTH_ANDIF_EXPR:
1522       /* In && or ||, warn if 2nd operand has no side effect.  */
1523       exp = TREE_OPERAND (exp, 1);
1527       if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1529       /* Let people do `(foo (), 0)' without a warning.  */
1530       if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1532       exp = TREE_OPERAND (exp, 1);
1536       /* If this is an expression with side effects, don't warn; this
1537 	 case commonly appears in macro expansions.  */
1538       if (TREE_SIDE_EFFECTS (exp))
1543       /* Don't warn about automatic dereferencing of references, since
1544 	 the user cannot control it.  */
1545       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1547 	  exp = TREE_OPERAND (exp, 0);
1553       /* Referencing a volatile value is a side effect, so don't warn.  */
1554       if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1555 	  && TREE_THIS_VOLATILE (exp))
1558       /* If this is an expression which has no operands, there is no value
1559 	 to be unused.  There are no such language-independent codes,
1560 	 but front ends may define such.  */
1561       if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1565       warning_at (locus, OPT_Wunused_value, "value computed is not used");
1571 /* Generate RTL to return from the current function, with no value.
1572    (That is, we do not do anything about returning any value.)  */
1575 expand_null_return (void)
1577   /* If this function was declared to return a value, but we
1578      didn't, clobber the return registers so that they are not
1579      propagated live to the rest of the function.  */
1580   clobber_return_register ();
/* Shared tail: flush pending stack adjustments and jump to the
   function's return label.  */
1582   expand_null_return_1 ();
1585 /* Generate RTL to return directly from the current function.
1586    (That is, we bypass any return value.)  */
1589 expand_naked_return (void)
1593   clear_pending_stack_adjust ();
1594   do_pending_stack_adjust ();
/* Lazily create the shared naked-return label the first time a naked
   return is expanded; all naked returns jump to the same label.  */
1596     end_label = naked_return_label;
1598     end_label = naked_return_label = gen_label_rtx ();
1600   emit_jump (end_label);
1603 /* Generate RTL to return from the current function, with value VAL.  */
1606 expand_value_return (rtx val)
1608   /* Copy the value to the return location unless it's already there.  */
1610   tree decl = DECL_RESULT (current_function_decl);
1611   rtx return_reg = DECL_RTL (decl);
1612   if (return_reg != val)
1614       tree funtype = TREE_TYPE (current_function_decl);
1615       tree type = TREE_TYPE (decl);
1616       int unsignedp = TYPE_UNSIGNED (type);
1617       enum machine_mode old_mode = DECL_MODE (decl);
1618       enum machine_mode mode;
/* Ask the target how the return value is promoted.  The final
   argument distinguishes a value returned by invisible reference (2)
   from an ordinary value return (1).  */
1619       if (DECL_BY_REFERENCE (decl))
1620 	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1622 	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1624       if (mode != old_mode)
1625 	val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value is split across several
   registers; use the group-load helper rather than a single move.  */
1627       if (GET_CODE (return_reg) == PARALLEL)
1628 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1630 	emit_move_insn (return_reg, val);
1633   expand_null_return_1 ();
1636 /* Output a return with no value.  */
1639 expand_null_return_1 (void)
/* Flush any deferred stack-pointer adjustments before leaving, then
   jump to the function's common return label.  */
1641   clear_pending_stack_adjust ();
1642   do_pending_stack_adjust ();
1643   emit_jump (return_label);
1646 /* Generate RTL to evaluate the expression RETVAL and return it
1647    from the current function.  */
1650 expand_return (tree retval)
1656   /* If function wants no value, give it none.  */
1657   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects before returning.  */
1659       expand_normal (retval);
1660       expand_null_return ();
1664   if (retval == error_mark_node)
1666       /* Treat this like a return of no value from a function that
1668       expand_null_return ();
/* A gimplified return is usually "return <RESULT_DECL> = rhs"; peel
   off the assignment and keep just the right-hand side.  */
1671   else if ((TREE_CODE (retval) == MODIFY_EXPR
1672 	    || TREE_CODE (retval) == INIT_EXPR)
1673 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1674     retval_rhs = TREE_OPERAND (retval, 1);
1676     retval_rhs = retval;
1678   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1680   /* If we are returning the RESULT_DECL, then the value has already
1681      been stored into it, so we don't have to do anything special.  */
1682   if (TREE_CODE (retval_rhs) == RESULT_DECL)
1683     expand_value_return (result_rtl);
1685   /* If the result is an aggregate that is being returned in one (or more)
1686      registers, load the registers here.  The compiler currently can't handle
1687      copying a BLKmode value into registers.  We could put this code in a
1688      more general area (for use by everyone instead of just function
1689      call/return), but until this feature is generally usable it is kept here
1690      (and in expand_call).  */
1692   else if (retval_rhs != 0
1693 	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1694 	   && REG_P (result_rtl))
1697       unsigned HOST_WIDE_INT bitpos, xbitpos;
1698       unsigned HOST_WIDE_INT padding_correction = 0;
1699       unsigned HOST_WIDE_INT bytes
1700 	= int_size_in_bytes (TREE_TYPE (retval_rhs));
1701       int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Copy granularity: no larger than the type's alignment, capped at a
   word, so each extract/store stays within one source word.  */
1702       unsigned int bitsize
1703 	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1704       rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1705       rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1706       rtx result_val = expand_normal (retval_rhs);
1707       enum machine_mode tmpmode, result_reg_mode;
1711 	  expand_null_return ();
1715       /* If the structure doesn't take up a whole number of words, see
1716 	 whether the register value should be padded on the left or on
1717 	 the right.  Set PADDING_CORRECTION to the number of padding
1718 	 bits needed on the left side.
1720 	 In most ABIs, the structure will be returned at the least end of
1721 	 the register, which translates to right padding on little-endian
1722 	 targets and left padding on big-endian targets.  The opposite
1723 	 holds if the structure is returned at the most significant
1724 	 end of the register.  */
1725       if (bytes % UNITS_PER_WORD != 0
1726 	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1728 	      : BYTES_BIG_ENDIAN))
1729 	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1732       /* Copy the structure BITSIZE bits at a time.  */
/* BITPOS walks the source value; XBITPOS walks the destination,
   offset by PADDING_CORRECTION so padding lands on the correct side.  */
1733       for (bitpos = 0, xbitpos = padding_correction;
1734 	   bitpos < bytes * BITS_PER_UNIT;
1735 	   bitpos += bitsize, xbitpos += bitsize)
1737 	  /* We need a new destination pseudo each time xbitpos is
1738 	     on a word boundary and when xbitpos == padding_correction
1739 	     (the first time through).  */
1740 	  if (xbitpos % BITS_PER_WORD == 0
1741 	      || xbitpos == padding_correction)
1743 	      /* Generate an appropriate register.  */
1744 	      dst = gen_reg_rtx (word_mode);
1745 	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1747 	      /* Clear the destination before we move anything into it.  */
1748 	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1751 	  /* We need a new source operand each time bitpos is on a word
1753 	  if (bitpos % BITS_PER_WORD == 0)
1754 	    src = operand_subword_force (result_val,
1755 					 bitpos / BITS_PER_WORD,
1758 	  /* Use bitpos for the source extraction (left justified) and
1759 	     xbitpos for the destination store (right justified).  */
1760 	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1761 			   extract_bit_field (src, bitsize,
1762 					      bitpos % BITS_PER_WORD, 1, false,
1763 					      NULL_RTX, word_mode, word_mode));
1766       tmpmode = GET_MODE (result_rtl);
1767       if (tmpmode == BLKmode)
1769 	  /* Find the smallest integer mode large enough to hold the
1770 	     entire structure and use that mode instead of BLKmode
1771 	     on the USE insn for the return register.  */
1772 	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1773 	       tmpmode != VOIDmode;
1774 	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1775 	    /* Have we found a large enough mode?  */
1776 	    if (GET_MODE_SIZE (tmpmode) >= bytes)
1779 	  /* A suitable mode should have been found.  */
1780 	  gcc_assert (tmpmode != VOIDmode);
1782 	  PUT_MODE (result_rtl, tmpmode);
/* Assemble the word-sized pieces in a pseudo at least word_mode wide,
   then narrow to the final mode if necessary.  */
1785       if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1786 	result_reg_mode = word_mode;
1788 	result_reg_mode = tmpmode;
1789       result_reg = gen_reg_rtx (result_reg_mode);
1791       for (i = 0; i < n_regs; i++)
1792 	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1795       if (tmpmode != result_reg_mode)
1796 	result_reg = gen_lowpart (tmpmode, result_reg);
1798       expand_value_return (result_reg);
1800   else if (retval_rhs != 0
1801 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1802 	   && (REG_P (result_rtl)
1803 	       || (GET_CODE (result_rtl) == PARALLEL)))
1805       /* Calculate the return value into a temporary (usually a pseudo
1807       tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
/* Make the temporary const-qualified so nothing tries to write it.  */
1808       tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1810       val = assign_temp (nt, 0, 0, 1);
1811       val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1812       val = force_not_mem (val);
1813       /* Return the calculated value.  */
1814       expand_value_return (val);
1818       /* No hard reg used; calculate value into hard return reg.  */
1819       expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1820       expand_value_return (result_rtl);
1824 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1827 expand_nl_goto_receiver (void)
1831   /* Clobber the FP when we get here, so we have to make sure it's
1832      marked as used by this function.  */
1833   emit_use (hard_frame_pointer_rtx);
1835   /* Mark the static chain as clobbered here so life information
1836      doesn't get messed up for it.  */
1837   chain = targetm.calls.static_chain (current_function_decl, true);
1838   if (chain && REG_P (chain))
1839     emit_clobber (chain);
/* Targets without a nonlocal_goto pattern need the frame and argument
   pointers reconstructed by hand below.  */
1841 #ifdef HAVE_nonlocal_goto
1842   if (! HAVE_nonlocal_goto)
1844       /* First adjust our frame pointer to its actual value.  It was
1845 	 previously set to the start of the virtual area corresponding to
1846 	 the stacked variables when we branched here and now needs to be
1847 	 adjusted to the actual hardware fp value.
1849 	 Assignments are to virtual registers are converted by
1850 	 instantiate_virtual_regs into the corresponding assignment
1851 	 to the underlying register (fp in this case) that makes
1852 	 the original assignment true.
1853 	 So the following insn will actually be
1854 	 decrementing fp by STARTING_FRAME_OFFSET.  */
1855       emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1857 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1858       if (fixed_regs[ARG_POINTER_REGNUM])
1860 #ifdef ELIMINABLE_REGS
1861 	  /* If the argument pointer can be eliminated in favor of the
1862 	     frame pointer, we don't need to restore it.  We assume here
1863 	     that if such an elimination is present, it can always be used.
1864 	     This is the case on all known machines; if we don't make this
1865 	     assumption, we do unnecessary saving on many machines.  */
1866 	  static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1869 	  for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1870 	    if (elim_regs[i].from == ARG_POINTER_REGNUM
1871 		&& elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* No arg-pointer -> frame-pointer elimination exists: reload the arg
   pointer from the slot where it was saved in the stack frame.  */
1874 	  if (i == ARRAY_SIZE (elim_regs))
1877 	      /* Now restore our arg pointer from the address at which it
1878 		 was saved in our stack frame.  */
1879 	      emit_move_insn (crtl->args.internal_arg_pointer,
1880 			      copy_to_reg (get_arg_pointer_save_area ()));
1885 #ifdef HAVE_nonlocal_goto_receiver
1886   if (HAVE_nonlocal_goto_receiver)
1887     emit_insn (gen_nonlocal_goto_receiver ());
1890   /* We must not allow the code we just generated to be reordered by
1891      scheduling.  Specifically, the update of the frame pointer must
1892      happen immediately, not later.  */
1893   emit_insn (gen_blockage ());
1896 /* Generate RTL for the automatic variable declaration DECL.
1897    (Other kinds of declarations are simply ignored if seen here.)  */
1900 expand_decl (tree decl)
1904   type = TREE_TYPE (decl);
1906   /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1907      type in case this node is used in a reference.  */
1908   if (TREE_CODE (decl) == CONST_DECL)
1910       DECL_MODE (decl) = TYPE_MODE (type);
1911       DECL_ALIGN (decl) = TYPE_ALIGN (type);
1912       DECL_SIZE (decl) = TYPE_SIZE (type);
1913       DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1917   /* Otherwise, only automatic variables need any expansion done.  Static and
1918      external variables, and external functions, will be handled by
1919      `assemble_variable' (called from finish_decl).  TYPE_DECL requires
1920      nothing.  PARM_DECLs are handled in `assign_parms'.  */
1921   if (TREE_CODE (decl) != VAR_DECL)
1924   if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1927   /* Create the RTL representation for the variable.  */
/* Erroneous type: give it a dummy MEM at address 0 to avoid crashes
   downstream.  */
1929   if (type == error_mark_node)
1930     SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1932   else if (DECL_SIZE (decl) == 0)
1934       /* Variable with incomplete type.  */
1936       if (DECL_INITIAL (decl) == 0)
1937 	/* Error message was already done; now avoid a crash.  */
1938 	x = gen_rtx_MEM (BLKmode, const0_rtx);
1940 	/* An initializer is going to decide the size of this array.
1941 	   Until we know the size, represent its address with a reg.  */
1942 	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1944       set_mem_attributes (x, decl, 1);
1945       SET_DECL_RTL (decl, x);
1947   else if (use_register_for_decl (decl))
1949       /* Automatic variable that can go in a register.  */
1950       enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1952       SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1954       /* Note if the object is a user variable.  */
1955       if (!DECL_ARTIFICIAL (decl))
1956 	mark_user_reg (DECL_RTL (decl));
/* Record pointer-ness and pointee alignment to help alias/alignment
   analysis on the pseudo.  */
1958       if (POINTER_TYPE_P (type))
1959 	mark_reg_pointer (DECL_RTL (decl),
1960 			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
/* Remaining case (reached via an elided else): a fixed-size automatic
   that must live in the stack frame.  */
1969       /* Variable-sized decls are dealt with in the gimplifier.  */
1970       gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);
1972       /* If we previously made RTL for this decl, it must be an array
1973 	 whose size was determined by the initializer.
1974 	 The old address was a register; set that register now
1975 	 to the proper address.  */
1976       if (DECL_RTL_SET_P (decl))
1978 	  gcc_assert (MEM_P (DECL_RTL (decl)));
1979 	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1980 	  oldaddr = XEXP (DECL_RTL (decl), 0);
1983       /* Set alignment we actually gave this decl.  */
1984       DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1985 			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
1986       DECL_USER_ALIGN (decl) = 0;
1988       x = assign_temp (decl, 1, 1, 1);
1989       set_mem_attributes (x, decl, 1);
1990       SET_DECL_RTL (decl, x);
/* Point the old placeholder address register at the real slot.  */
1994 	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1995 	  if (addr != oldaddr)
1996 	    emit_move_insn (oldaddr, addr);
2001 /* Emit code to save the current value of stack.  */
2003 expand_stack_save (void)
/* Flush pending adjustments first so the saved pointer is accurate.  */
2007   do_pending_stack_adjust ();
2008   emit_stack_save (SAVE_BLOCK, &ret);
2012 /* Emit code to restore the current value of stack.  */
2014 expand_stack_restore (tree var)
/* VAR holds the value previously produced by expand_stack_save.  */
2016   rtx sa = expand_normal (var);
2018   sa = convert_memory_address (Pmode, sa);
2019   emit_stack_restore (SAVE_BLOCK, sa);
2022 /* Do the insertion of a case label into case_list.  The labels are
2023    fed to us in descending order from the sorted vector of case labels used
2024    in the tree part of the middle end.  So the list we construct is
2025    sorted in ascending order.  The bounds on the case range, LOW and HIGH,
2026    are converted to case's index type TYPE.  */
2028 static struct case_node *
2029 add_case_node (struct case_node *head, tree type, tree low, tree high,
2030 	       tree label, alloc_pool case_node_pool)
2032   tree min_value, max_value;
2033   struct case_node *r;
2035   gcc_assert (TREE_CODE (low) == INTEGER_CST);
2036   gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
2038   min_value = TYPE_MIN_VALUE (type);
2039   max_value = TYPE_MAX_VALUE (type);
2041   /* If there's no HIGH value, then this is not a case range; it's
2042      just a simple case label.  But that's just a degenerate case
2044      If the bounds are equal, turn this into the one-value case.  */
2045   if (!high || tree_int_cst_equal (low, high))
2047       /* If the simple case value is unreachable, ignore it.  */
2048       if ((TREE_CODE (min_value) == INTEGER_CST
2049 	   && tree_int_cst_compare (low, min_value) < 0)
2050 	  || (TREE_CODE (max_value) == INTEGER_CST
2051 	      && tree_int_cst_compare (low, max_value) > 0))
2053       low = fold_convert (type, low);
2058       /* If the entire case range is unreachable, ignore it.  */
2059       if ((TREE_CODE (min_value) == INTEGER_CST
2060 	   && tree_int_cst_compare (high, min_value) < 0)
2061 	  || (TREE_CODE (max_value) == INTEGER_CST
2062 	      && tree_int_cst_compare (low, max_value) > 0))
2065       /* If the lower bound is less than the index type's minimum
2066 	 value, truncate the range bounds.  */
2067       if (TREE_CODE (min_value) == INTEGER_CST
2068 	  && tree_int_cst_compare (low, min_value) < 0)
2070       low = fold_convert (type, low);
2072       /* If the upper bound is greater than the index type's maximum
2073 	 value, truncate the range bounds.  */
2074       if (TREE_CODE (max_value) == INTEGER_CST
2075 	  && tree_int_cst_compare (high, max_value) > 0)
2077       high = fold_convert (type, high);
2081   /* Add this label to the chain.  Make sure to drop overflow flags.  */
/* build_int_cst_wide rebuilds the constants from their raw words,
   which discards any TREE_OVERFLOW marking.  */
2082   r = (struct case_node *) pool_alloc (case_node_pool);
2083   r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2084 			       TREE_INT_CST_HIGH (low));
2085   r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2086 				TREE_INT_CST_HIGH (high));
2087   r->code_label = label;
2088   r->parent = r->left = NULL;
2093 /* Maximum number of case bit tests. */
2094 #define MAX_CASE_BIT_TESTS 3
2096 /* By default, enable case bit tests on targets with ashlsi3. */
2097 #ifndef CASE_USE_BIT_TESTS
2098 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode) \
2099 != CODE_FOR_nothing)
2103 /* A case_bit_test represents a set of case nodes that may be
2104 selected from using a bit-wise comparison. HI and LO hold
2105 the integer to be tested against, LABEL contains the label
2106 to jump to upon success and BITS counts the number of case
2107 nodes handled by this test, typically the number of bits
2110 struct case_bit_test
2118 /* Determine whether "1 << x" is relatively cheap in word_mode.  */
2121 bool lshift_cheap_p (void)
/* Cached separately for the size-optimized and speed-optimized cost
   models, computed lazily on first query for each.  */
2123   static bool init[2] = {false, false};
2124   static bool cheap[2] = {true, true};
2126   bool speed_p = optimize_insn_for_speed_p ();
/* Probe the cost of (ashift 1 reg) against an arbitrary high pseudo
   number (10000) so no real hard register is implied.  */
2130       rtx reg = gen_rtx_REG (word_mode, 10000);
2131       int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2133       cheap[speed_p] = cost < COSTS_N_INSNS (3);
2134       init[speed_p] = true;
2137   return cheap[speed_p];
2140 /* Comparison function for qsort to order bit tests by decreasing
2141    number of case nodes, i.e. the node with the most cases gets
2145 case_bit_test_cmp (const void *p1, const void *p2)
2147   const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2148   const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
/* Primary key: more bits (cases) sorts earlier (descending).  */
2150   if (d2->bits != d1->bits)
2151     return d2->bits - d1->bits;
2153   /* Stabilize the sort.  */
2154   return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2157 /* Expand a switch statement by a short sequence of bit-wise
2158 comparisons. "switch(x)" is effectively converted into
2159 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2162 INDEX_EXPR is the value being switched on, which is of
2163 type INDEX_TYPE. MINVAL is the lowest case value of in
2164 the case nodes, of INDEX_TYPE type, and RANGE is highest
2165 value minus MINVAL, also of type INDEX_TYPE. NODES is
2166 the set of case nodes, and DEFAULT_LABEL is the label to
2167 branch to should none of the cases match.
2169 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2173 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2174 tree range, case_node_ptr nodes, rtx default_label)
2176 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2177 enum machine_mode mode;
2178 rtx expr, index, label;
2179 unsigned int i,j,lo,hi;
2180 struct case_node *n;
2184 for (n = nodes; n; n = n->right)
2186 label = label_rtx (n->code_label);
2187 for (i = 0; i < count; i++)
2188 if (label == test[i].label)
2193 gcc_assert (count < MAX_CASE_BIT_TESTS);
2196 test[i].label = label;
2203 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2204 n->low, minval), 1);
2205 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2206 n->high, minval), 1);
2207 for (j = lo; j <= hi; j++)
2208 if (j >= HOST_BITS_PER_WIDE_INT)
2209 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2211 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2214 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2216 index_expr = fold_build2 (MINUS_EXPR, index_type,
2217 fold_convert (index_type, index_expr),
2218 fold_convert (index_type, minval));
2219 index = expand_normal (index_expr);
2220 do_pending_stack_adjust ();
2222 mode = TYPE_MODE (index_type);
2223 expr = expand_normal (range);
2225 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2228 index = convert_to_mode (word_mode, index, 0);
2229 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2230 index, NULL_RTX, 1, OPTAB_WIDEN);
2232 for (i = 0; i < count; i++)
2234 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2235 expr = expand_binop (word_mode, and_optab, index, expr,
2236 NULL_RTX, 1, OPTAB_WIDEN);
2237 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2238 word_mode, 1, test[i].label);
2242 emit_jump (default_label);
2246 #define HAVE_casesi 0
2249 #ifndef HAVE_tablejump
2250 #define HAVE_tablejump 0
2253 /* Return true if a switch should be expanded as a bit test.
2254    INDEX_EXPR is the index expression, RANGE is the difference between
2255    highest and lowest case, UNIQ is number of unique case node targets
2256    not counting the default case and COUNT is the number of comparisons
2257    needed, not counting the default case.  */
2259 expand_switch_using_bit_tests_p (tree index_expr, tree range,
2260 				 unsigned int uniq, unsigned int count)
/* Worthwhile only when: the target supports it, the index is not a
   compile-time constant (constants are better folded elsewhere), the
   whole case range fits in one machine word (0 < range < word bits),
   shifting is cheap, and the comparisons saved outweigh the mask
   setup -- thresholds rising with the number of distinct targets.  */
2262   return (CASE_USE_BIT_TESTS
2263 	  && ! TREE_CONSTANT (index_expr)
2264 	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2265 	  && compare_tree_int (range, 0) > 0
2266 	  && lshift_cheap_p ()
2267 	  && ((uniq == 1 && count >= 3)
2268 	      || (uniq == 2 && count >= 5)
2269 	      || (uniq == 3 && count >= 6)));
2272 /* Terminate a case (Pascal/Ada) or switch (C) statement
2273 in which ORIG_INDEX is the expression to be tested.
2274 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2275 type as given in the source before any compiler conversions.
2276 Generate the code to test it and jump to the right place. */
2279 expand_case (gimple stmt)
2281 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2282 rtx default_label = 0;
2283 struct case_node *n;
2284 unsigned int count, uniq;
2290 rtx before_case, end, lab;
2292 tree index_expr = gimple_switch_index (stmt);
2293 tree index_type = TREE_TYPE (index_expr);
2294 int unsignedp = TYPE_UNSIGNED (index_type);
2296 /* The insn after which the case dispatch should finally
2297 be emitted. Zero for a dummy. */
2300 /* A list of case labels; it is first built as a list and it may then
2301 be rearranged into a nearly balanced binary tree. */
2302 struct case_node *case_list = 0;
2304 /* Label to jump to if no case matches. */
2305 tree default_label_decl = NULL_TREE;
2307 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2308 sizeof (struct case_node),
2311 do_pending_stack_adjust ();
2313 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2314 if (index_type != error_mark_node)
2317 bitmap label_bitmap;
2320 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2321 expressions being INTEGER_CST. */
2322 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2324 /* The default case, if ever taken, is the first element. */
2325 elt = gimple_switch_label (stmt, 0);
2326 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2328 default_label_decl = CASE_LABEL (elt);
2332 for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
2335 elt = gimple_switch_label (stmt, i);
2337 low = CASE_LOW (elt);
2339 high = CASE_HIGH (elt);
2341 /* Discard empty ranges. */
2342 if (high && tree_int_cst_lt (high, low))
2345 case_list = add_case_node (case_list, index_type, low, high,
2346 CASE_LABEL (elt), case_node_pool);
2350 before_case = start = get_last_insn ();
2351 if (default_label_decl)
2352 default_label = label_rtx (default_label_decl);
2354 /* Get upper and lower bounds of case values. */
2358 label_bitmap = BITMAP_ALLOC (NULL);
2359 for (n = case_list; n; n = n->right)
2361 /* Count the elements and track the largest and smallest
2362 of them (treating them as signed even if they are not). */
2370 if (tree_int_cst_lt (n->low, minval))
2372 if (tree_int_cst_lt (maxval, n->high))
2375 /* A range counts double, since it requires two compares. */
2376 if (! tree_int_cst_equal (n->low, n->high))
2379 /* If we have not seen this label yet, then increase the
2380 number of unique case node targets seen. */
2381 lab = label_rtx (n->code_label);
2382 if (bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)))
2386 BITMAP_FREE (label_bitmap);
2388 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2389 destination, such as one with a default case only. However,
2390 it doesn't remove cases that are out of range for the switch
2391 type, so we may still get a zero here. */
2395 emit_jump (default_label);
2396 free_alloc_pool (case_node_pool);
2400 /* Compute span of values. */
2401 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2403 /* Try implementing this switch statement by a short sequence of
2404 bit-wise comparisons. However, we let the binary-tree case
2405 below handle constant index expressions. */
2406 if (expand_switch_using_bit_tests_p (index_expr, range, uniq, count))
2408 /* Optimize the case where all the case values fit in a
2409 word without having to subtract MINVAL. In this case,
2410 we can optimize away the subtraction. */
2411 if (compare_tree_int (minval, 0) > 0
2412 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2414 minval = build_int_cst (index_type, 0);
2417 emit_case_bit_tests (index_type, index_expr, minval, range,
2418 case_list, default_label);
2421 /* If range of values is much bigger than number of values,
2422 make a sequence of conditional branches instead of a dispatch.
2423 If the switch-index is a constant, do it this way
2424 because we can optimize it. */
2426 else if (count < targetm.case_values_threshold ()
2427 || compare_tree_int (range,
2428 (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
2429 /* RANGE may be signed, and really large ranges will show up
2430 as negative numbers. */
2431 || compare_tree_int (range, 0) < 0
2432 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2435 || !flag_jump_tables
2436 || TREE_CONSTANT (index_expr)
2437 /* If neither casesi or tablejump is available, we can
2438 only go this way. */
2439 || (!HAVE_casesi && !HAVE_tablejump))
2441 index = expand_normal (index_expr);
2443 /* If the index is a short or char that we do not have
2444 an insn to handle comparisons directly, convert it to
2445 a full integer now, rather than letting each comparison
2446 generate the conversion. */
2448 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2449 && ! have_insn_for (COMPARE, GET_MODE (index)))
2451 enum machine_mode wider_mode;
2452 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2453 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2454 if (have_insn_for (COMPARE, wider_mode))
2456 index = convert_to_mode (wider_mode, index, unsignedp);
2461 do_pending_stack_adjust ();
2464 index = copy_to_reg (index);
2466 /* We generate a binary decision tree to select the
2467 appropriate target code. This is done as follows:
2469 The list of cases is rearranged into a binary tree,
2470 nearly optimal assuming equal probability for each case.
2472 The tree is transformed into RTL, eliminating
2473 redundant test conditions at the same time.
2475 If program flow could reach the end of the
2476 decision tree an unconditional jump to the
2477 default code is emitted. */
2479 use_cost_table = estimate_case_costs (case_list);
2480 balance_case_nodes (&case_list, NULL);
2481 emit_case_nodes (index, case_list, default_label, index_type);
2483 emit_jump (default_label);
2487 rtx fallback_label = label_rtx (case_list->code_label);
2488 table_label = gen_label_rtx ();
2489 if (! try_casesi (index_type, index_expr, minval, range,
2490 table_label, default_label, fallback_label))
2494 /* Index jumptables from zero for suitable values of
2495 minval to avoid a subtraction. */
2496 if (optimize_insn_for_speed_p ()
2497 && compare_tree_int (minval, 0) > 0
2498 && compare_tree_int (minval, 3) < 0)
2500 minval = build_int_cst (index_type, 0);
2504 ok = try_tablejump (index_type, index_expr, minval, range,
2505 table_label, default_label);
2509 /* Get table of labels to jump to, in order of case index. */
2511 ncases = tree_low_cst (range, 0) + 1;
2512 labelvec = XALLOCAVEC (rtx, ncases);
2513 memset (labelvec, 0, ncases * sizeof (rtx));
2515 for (n = case_list; n; n = n->right)
2517 /* Compute the low and high bounds relative to the minimum
2518 value since that should fit in a HOST_WIDE_INT while the
2519 actual values may not. */
2521 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2522 n->low, minval), 1);
2523 HOST_WIDE_INT i_high
2524 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2525 n->high, minval), 1);
2528 for (i = i_low; i <= i_high; i ++)
2530 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2533 /* Fill in the gaps with the default. We may have gaps at
2534 the beginning if we tried to avoid the minval subtraction,
2535 so substitute some label even if the default label was
2536 deemed unreachable. */
2538 default_label = fallback_label;
2539 for (i = 0; i < ncases; i++)
2540 if (labelvec[i] == 0)
2541 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2543 /* Output the table. */
2544 emit_label (table_label);
2546 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2547 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2548 gen_rtx_LABEL_REF (Pmode, table_label),
2549 gen_rtvec_v (ncases, labelvec),
2550 const0_rtx, const0_rtx));
2552 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2553 gen_rtvec_v (ncases, labelvec)));
2555 /* Record no drop-through after the table. */
2559 before_case = NEXT_INSN (before_case);
2560 end = get_last_insn ();
2561 reorder_insns (before_case, end, start);
2565 free_alloc_pool (case_node_pool);
2568 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
/* NOTE(review): this excerpt elides physical lines (the UNSIGNEDP parameter
   line, the return type, and braces are not shown) -- confirm against the
   full source file.  */
2571 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
/* Thin wrapper: emit a single EQ compare-and-branch.  UNSIGNEDP selects
   unsigned comparison semantics.  The trailing -1 presumably means "no
   branch-probability hint" -- TODO confirm against do_compare_rtx_and_jump's
   declaration.  */
2574 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2575 NULL_RTX, NULL_RTX, label, -1);
2578 /* Not all case values are encountered equally. This function
2579 uses a heuristic to weight case labels, in cases where that
2580 looks like a reasonable thing to do.
2582 Right now, all we try to guess is text, and we establish the
2585 chars above space: 16
2594 If we find any cases in the switch that are not either -1 or in the range
2595 of valid ASCII characters, or are control characters other than those
2596 commonly used with "\", don't treat this switch scanning text.
2598 Return 1 if these nodes are suitable for cost estimation, otherwise
/* NOTE(review): elided excerpt -- return statements, the COST_TABLE storage
   declaration, and several loop bodies are missing between the retained
   lines; confirm against the full source.  */
2602 estimate_case_costs (case_node_ptr node)
/* Text heuristic bounds: every case value must lie in [-1, 127] to be
   treated as ASCII text (see the range check in the loop below).  */
2604 tree min_ascii = integer_minus_one_node;
2605 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2609 /* If we haven't already made the cost table, make it now. Note that the
2610 lower bound of the table is -1, not zero. */
/* Lazily build the shared weight table on first call.  Alphanumerics get
   weight 16; common "\"-escapes get small explicit weights; other control
   characters are marked -1 (i.e. "not text", rejected below).  */
2612 if (! cost_table_initialized)
2614 cost_table_initialized = 1;
2616 for (i = 0; i < 128; i++)
2619 COST_TABLE (i) = 16;
2620 else if (ISPUNCT (i))
2622 else if (ISCNTRL (i))
2623 COST_TABLE (i) = -1;
2626 COST_TABLE (' ') = 8;
2627 COST_TABLE ('\t') = 4;
2628 COST_TABLE ('\0') = 4;
2629 COST_TABLE ('\n') = 2;
2630 COST_TABLE ('\f') = 1;
2631 COST_TABLE ('\v') = 1;
2632 COST_TABLE ('\b') = 1;
2635 /* See if all the case expressions look like text. It is text if the
2636 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2637 as signed arithmetic since we don't want to ever access cost_table with a
2638 value less than -1. Also check that none of the constants in a range
2639 are strange control characters. */
2641 for (n = node; n; n = n->right)
2643 if (tree_int_cst_lt (n->low, min_ascii)
2644 || tree_int_cst_lt (max_ascii, n->high))
/* Walk every value inside a range case; a -1 table entry (strange
   control character) presumably disqualifies the whole switch -- the
   early-return lines are elided here, confirm in the full file.  */
2647 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2648 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2649 if (COST_TABLE (i) < 0)
2653 /* All interesting values are within the range of interesting
2654 ASCII characters. */
2658 /* Take an ordered list of case nodes
2659 and transform them into a near optimal binary tree,
2660 on the assumption that any target code selection value is as
2661 likely as any other.
2663 The transformation is performed by splitting the ordered
2664 list into two equal sections plus a pivot. The parts are
2665 then attached to the pivot as left and right branches. Each
2666 branch is then transformed recursively. */
/* NOTE(review): elided excerpt -- local declarations, braces, and several
   statements between the retained lines are missing; confirm against the
   full source before relying on exact control flow.  */
2669 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2682 /* Count the number of entries on branch. Also count the ranges. */
/* Range cases (low != high) are weighted with both endpoints' table
   entries; this feeds the cost-bisection below.  */
2686 if (!tree_int_cst_equal (np->low, np->high))
2690 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2694 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2702 /* Split this list if it is long enough for that to help. */
2707 /* Find the place in the list that bisects the list's total cost,
2708 Here I gets half the total cost. */
2713 /* Skip nodes while their cost does not reach that amount. */
/* Cost-weighted bisection: subtract each node's weight from I until the
   half-cost budget is exhausted; NPP then points at the pivot link.  */
2714 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2715 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2716 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2719 npp = &(*npp)->right;
2724 /* Leave this branch lopsided, but optimize left-hand
2725 side and fill in `parent' fields for right-hand side. */
2727 np->parent = parent;
2728 balance_case_nodes (&np->left, np);
2729 for (; np->right; np = np->right)
2730 np->right->parent = np;
2734 /* If there are just three nodes, split at the middle one. */
2736 npp = &(*npp)->right;
2739 /* Find the place in the list that bisects the list's total cost,
2740 where ranges count as 2.
2741 Here I gets half the total cost. */
/* Non-cost-table path: each range counts twice (two comparisons), so the
   midpoint is computed from count + ranges, rounded up.  */
2742 i = (i + ranges + 1) / 2;
2745 /* Skip nodes while their cost does not reach that amount. */
2746 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2751 npp = &(*npp)->right;
/* Pivot found: detach the two halves, recurse on each, and wire up
   `parent' so node_has_low_bound/node_has_high_bound can walk upward.  */
2756 np->parent = parent;
2759 /* Optimize each of the two split parts. */
2760 balance_case_nodes (&np->left, np);
2761 balance_case_nodes (&np->right, np);
2765 /* Else leave this branch as one level,
2766 but fill in `parent' fields. */
2768 np->parent = parent;
2769 for (; np->right; np = np->right)
2770 np->right->parent = np;
2775 /* Search the parent sections of the case node tree
2776 to see if a test for the lower bound of NODE would be redundant.
2777 INDEX_TYPE is the type of the index expression.
2779 The instructions to generate the case decision tree are
2780 output in the same order as nodes are processed so it is
2781 known that if a parent node checks the range of the current
2782 node minus one that the current node is bounded at its lower
2783 span. Thus the test would be redundant. */
/* NOTE(review): elided excerpt -- return statements and braces between the
   retained lines are missing; confirm against the full source.  */
2786 node_has_low_bound (case_node_ptr node, tree index_type)
2789 case_node_ptr pnode;
2791 /* If the lower bound of this node is the lowest value in the index type,
2792 we need not test it. */
2794 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2797 /* If this node has a left branch, the value at the left must be less
2798 than that at this node, so it cannot be bounded at the bottom and
2799 we need not bother testing any further. */
/* Compute low - 1 so we can look for a parent whose HIGH equals it.  */
2804 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2806 build_int_cst (TREE_TYPE (node->low), 1));
2808 /* If the subtraction above overflowed, we can't verify anything.
2809 Otherwise, look for a parent that tests our value - 1. */
/* Overflow check: if low-1 is not strictly less than low, the subtraction
   wrapped and the result is meaningless.  */
2811 if (! tree_int_cst_lt (low_minus_one, node->low))
/* Walk up the `parent' chain (set by balance_case_nodes); a parent whose
   high bound is exactly low-1 already ruled out all smaller values.  */
2814 for (pnode = node->parent; pnode; pnode = pnode->parent)
2815 if (tree_int_cst_equal (low_minus_one, pnode->high))
2821 /* Search the parent sections of the case node tree
2822 to see if a test for the upper bound of NODE would be redundant.
2823 INDEX_TYPE is the type of the index expression.
2825 The instructions to generate the case decision tree are
2826 output in the same order as nodes are processed so it is
2827 known that if a parent node checks the range of the current
2828 node plus one that the current node is bounded at its upper
2829 span. Thus the test would be redundant. */
/* NOTE(review): elided excerpt (mirror image of node_has_low_bound) --
   return statements and braces are missing; confirm against the full
   source.  */
2832 node_has_high_bound (case_node_ptr node, tree index_type)
2835 case_node_ptr pnode;
2837 /* If there is no upper bound, obviously no test is needed. */
2839 if (TYPE_MAX_VALUE (index_type) == NULL)
2842 /* If the upper bound of this node is the highest value in the type
2843 of the index expression, we need not test against it. */
2845 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2848 /* If this node has a right branch, the value at the right must be greater
2849 than that at this node, so it cannot be bounded at the top and
2850 we need not bother testing any further. */
/* Compute high + 1 so we can look for a parent whose LOW equals it.  */
2855 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2857 build_int_cst (TREE_TYPE (node->high), 1));
2859 /* If the addition above overflowed, we can't verify anything.
2860 Otherwise, look for a parent that tests our value + 1. */
/* Overflow check: if high is not strictly less than high+1, the addition
   wrapped and the result is meaningless.  */
2862 if (! tree_int_cst_lt (node->high, high_plus_one))
/* A parent whose low bound is exactly high+1 already ruled out all
   larger values on this path through the decision tree.  */
2865 for (pnode = node->parent; pnode; pnode = pnode->parent)
2866 if (tree_int_cst_equal (high_plus_one, pnode->low))
2872 /* Search the parent sections of the
2873 case node tree to see if both tests for the upper and lower
2874 bounds of NODE would be redundant. */
2877 node_is_bounded (case_node_ptr node, tree index_type)
/* True only when BOTH bound tests are provably redundant, i.e. every
   index value reaching this node already lies within [low, high].  */
2879 return (node_has_low_bound (node, index_type)
2880 && node_has_high_bound (node, index_type));
2883 /* Emit step-by-step code to select a case for the value of INDEX.
2884 The thus generated decision tree follows the form of the
2885 case-node binary tree NODE, whose nodes represent test conditions.
2886 INDEX_TYPE is the type of the index of the switch.
2888 Care is taken to prune redundant tests from the decision tree
2889 by detecting any boundary conditions already checked by
2890 emitted rtx. (See node_has_high_bound, node_has_low_bound
2891 and node_is_bounded, above.)
2893 Where the test conditions can be shown to be redundant we emit
2894 an unconditional jump to the target code. As a further
2895 optimization, the subordinates of a tree node are examined to
2896 check for bounded nodes. In this case conditional and/or
2897 unconditional jumps as a result of the boundary check for the
2898 current node are arranged to target the subordinates associated
2899 code for out of bound conditions on the current node.
2901 We can assume that when control reaches the code generated here,
2902 the index value has already been compared with the parents
2903 of this node, and determined to be on the same side of each parent
2904 as this node is. Thus, if this node tests for the value 51,
2905 and a parent tested for 52, we don't need to consider
2906 the possibility of a value greater than 51. If another parent
2907 tests for the value 50, then this node need not test anything. */
2910 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2913 /* If INDEX has an unsigned type, we must make unsigned branches. */
2914 int unsignedp = TYPE_UNSIGNED (index_type);
2915 enum machine_mode mode = GET_MODE (index);
2916 enum machine_mode imode = TYPE_MODE (index_type);
2918 /* Handle indices detected as constant during RTL expansion. */
2919 if (mode == VOIDmode)
2922 /* See if our parents have already tested everything for us.
2923 If they have, emit an unconditional jump for this node. */
2924 if (node_is_bounded (node, index_type))
2925 emit_jump (label_rtx (node->code_label));
2927 else if (tree_int_cst_equal (node->low, node->high))
2929 /* Node is single valued. First see if the index expression matches
2930 this node and then check our children, if any. */
2932 do_jump_if_equal (mode, index,
2933 convert_modes (mode, imode,
2934 expand_normal (node->low),
2936 label_rtx (node->code_label), unsignedp);
2938 if (node->right != 0 && node->left != 0)
2940 /* This node has children on both sides.
2941 Dispatch to one side or the other
2942 by comparing the index value with this node's value.
2943 If one subtree is bounded, check that one first,
2944 so we can avoid real branches in the tree. */
2946 if (node_is_bounded (node->right, index_type))
2948 emit_cmp_and_jump_insns (index,
2951 expand_normal (node->high),
2953 GT, NULL_RTX, mode, unsignedp,
2954 label_rtx (node->right->code_label));
2955 emit_case_nodes (index, node->left, default_label, index_type);
2958 else if (node_is_bounded (node->left, index_type))
2960 emit_cmp_and_jump_insns (index,
2963 expand_normal (node->high),
2965 LT, NULL_RTX, mode, unsignedp,
2966 label_rtx (node->left->code_label));
2967 emit_case_nodes (index, node->right, default_label, index_type);
2970 /* If both children are single-valued cases with no
2971 children, finish up all the work. This way, we can save
2972 one ordered comparison. */
2973 else if (tree_int_cst_equal (node->right->low, node->right->high)
2974 && node->right->left == 0
2975 && node->right->right == 0
2976 && tree_int_cst_equal (node->left->low, node->left->high)
2977 && node->left->left == 0
2978 && node->left->right == 0)
2980 /* Neither node is bounded. First distinguish the two sides;
2981 then emit the code for one side at a time. */
2983 /* See if the value matches what the right hand side
2985 do_jump_if_equal (mode, index,
2986 convert_modes (mode, imode,
2987 expand_normal (node->right->low),
2989 label_rtx (node->right->code_label),
2992 /* See if the value matches what the left hand side
2994 do_jump_if_equal (mode, index,
2995 convert_modes (mode, imode,
2996 expand_normal (node->left->low),
2998 label_rtx (node->left->code_label),
3004 /* Neither node is bounded. First distinguish the two sides;
3005 then emit the code for one side at a time. */
3008 = build_decl (CURR_INSN_LOCATION,
3009 LABEL_DECL, NULL_TREE, NULL_TREE);
3011 /* See if the value is on the right. */
3012 emit_cmp_and_jump_insns (index,
3015 expand_normal (node->high),
3017 GT, NULL_RTX, mode, unsignedp,
3018 label_rtx (test_label));
3020 /* Value must be on the left.
3021 Handle the left-hand subtree. */
3022 emit_case_nodes (index, node->left, default_label, index_type);
3023 /* If left-hand subtree does nothing,
3026 emit_jump (default_label);
3028 /* Code branches here for the right-hand subtree. */
3029 expand_label (test_label);
3030 emit_case_nodes (index, node->right, default_label, index_type);
3034 else if (node->right != 0 && node->left == 0)
3036 /* Here we have a right child but no left so we issue a conditional
3037 branch to default and process the right child.
3039 Omit the conditional branch to default if the right child
3040 does not have any children and is single valued; it would
3041 cost too much space to save so little time. */
3043 if (node->right->right || node->right->left
3044 || !tree_int_cst_equal (node->right->low, node->right->high))
3046 if (!node_has_low_bound (node, index_type))
3048 emit_cmp_and_jump_insns (index,
3051 expand_normal (node->high),
3053 LT, NULL_RTX, mode, unsignedp,
3057 emit_case_nodes (index, node->right, default_label, index_type);
3060 /* We cannot process node->right normally
3061 since we haven't ruled out the numbers less than
3062 this node's value. So handle node->right explicitly. */
3063 do_jump_if_equal (mode, index,
3066 expand_normal (node->right->low),
3068 label_rtx (node->right->code_label), unsignedp);
3071 else if (node->right == 0 && node->left != 0)
3073 /* Just one subtree, on the left. */
3074 if (node->left->left || node->left->right
3075 || !tree_int_cst_equal (node->left->low, node->left->high))
3077 if (!node_has_high_bound (node, index_type))
3079 emit_cmp_and_jump_insns (index,
3082 expand_normal (node->high),
3084 GT, NULL_RTX, mode, unsignedp,
3088 emit_case_nodes (index, node->left, default_label, index_type);
3091 /* We cannot process node->left normally
3092 since we haven't ruled out the numbers less than
3093 this node's value. So handle node->left explicitly. */
3094 do_jump_if_equal (mode, index,
3097 expand_normal (node->left->low),
3099 label_rtx (node->left->code_label), unsignedp);
3104 /* Node is a range. These cases are very similar to those for a single
3105 value, except that we do not start by testing whether this node
3106 is the one to branch to. */
3108 if (node->right != 0 && node->left != 0)
3110 /* Node has subtrees on both sides.
3111 If the right-hand subtree is bounded,
3112 test for it first, since we can go straight there.
3113 Otherwise, we need to make a branch in the control structure,
3114 then handle the two subtrees. */
3115 tree test_label = 0;
3117 if (node_is_bounded (node->right, index_type))
3118 /* Right hand node is fully bounded so we can eliminate any
3119 testing and branch directly to the target code. */
3120 emit_cmp_and_jump_insns (index,
3123 expand_normal (node->high),
3125 GT, NULL_RTX, mode, unsignedp,
3126 label_rtx (node->right->code_label));
3129 /* Right hand node requires testing.
3130 Branch to a label where we will handle it later. */
3132 test_label = build_decl (CURR_INSN_LOCATION,
3133 LABEL_DECL, NULL_TREE, NULL_TREE);
3134 emit_cmp_and_jump_insns (index,
3137 expand_normal (node->high),
3139 GT, NULL_RTX, mode, unsignedp,
3140 label_rtx (test_label));
3143 /* Value belongs to this node or to the left-hand subtree. */
3145 emit_cmp_and_jump_insns (index,
3148 expand_normal (node->low),
3150 GE, NULL_RTX, mode, unsignedp,
3151 label_rtx (node->code_label));
3153 /* Handle the left-hand subtree. */
3154 emit_case_nodes (index, node->left, default_label, index_type);
3156 /* If right node had to be handled later, do that now. */
3160 /* If the left-hand subtree fell through,
3161 don't let it fall into the right-hand subtree. */
3163 emit_jump (default_label);
3165 expand_label (test_label);
3166 emit_case_nodes (index, node->right, default_label, index_type);
3170 else if (node->right != 0 && node->left == 0)
3172 /* Deal with values to the left of this node,
3173 if they are possible. */
3174 if (!node_has_low_bound (node, index_type))
3176 emit_cmp_and_jump_insns (index,
3179 expand_normal (node->low),
3181 LT, NULL_RTX, mode, unsignedp,
3185 /* Value belongs to this node or to the right-hand subtree. */
3187 emit_cmp_and_jump_insns (index,
3190 expand_normal (node->high),
3192 LE, NULL_RTX, mode, unsignedp,
3193 label_rtx (node->code_label));
3195 emit_case_nodes (index, node->right, default_label, index_type);
3198 else if (node->right == 0 && node->left != 0)
3200 /* Deal with values to the right of this node,
3201 if they are possible. */
3202 if (!node_has_high_bound (node, index_type))
3204 emit_cmp_and_jump_insns (index,
3207 expand_normal (node->high),
3209 GT, NULL_RTX, mode, unsignedp,
3213 /* Value belongs to this node or to the left-hand subtree. */
3215 emit_cmp_and_jump_insns (index,
3218 expand_normal (node->low),
3220 GE, NULL_RTX, mode, unsignedp,
3221 label_rtx (node->code_label));
3223 emit_case_nodes (index, node->left, default_label, index_type);
3228 /* Node has no children so we check low and high bounds to remove
3229 redundant tests. Only one of the bounds can exist,
3230 since otherwise this node is bounded--a case tested already. */
3231 int high_bound = node_has_high_bound (node, index_type);
3232 int low_bound = node_has_low_bound (node, index_type);
3234 if (!high_bound && low_bound)
3236 emit_cmp_and_jump_insns (index,
3239 expand_normal (node->high),
3241 GT, NULL_RTX, mode, unsignedp,
3245 else if (!low_bound && high_bound)
3247 emit_cmp_and_jump_insns (index,
3250 expand_normal (node->low),
3252 LT, NULL_RTX, mode, unsignedp,
3255 else if (!low_bound && !high_bound)
3257 /* Widen LOW and HIGH to the same width as INDEX. */
3258 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3259 tree low = build1 (CONVERT_EXPR, type, node->low);
3260 tree high = build1 (CONVERT_EXPR, type, node->high);
3261 rtx low_rtx, new_index, new_bound;
3263 /* Instead of doing two branches, emit one unsigned branch for
3264 (index-low) > (high-low). */
3265 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3266 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3267 NULL_RTX, unsignedp,
3269 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3271 NULL_RTX, mode, EXPAND_NORMAL);
3273 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3274 mode, 1, default_label);
3277 emit_jump (label_rtx (node->code_label));