1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
version.

13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.

18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 The functions whose names start with `expand_' are called by the
25 expander to generate RTL instructions for various kinds of constructs. */
29 #include "coretypes.h"
33 #include "hard-reg-set.h"
39 #include "insn-config.h"
47 #include "langhooks.h"
52 #include "alloc-pool.h"
54 /* Functions and data structures for expanding case statements. */
56 /* Case label structure, used to hold info on labels within case
57 statements. We handle "range" labels; for a single-value label
58 as in C, the high and low limits are the same.
60 We start with a vector of case nodes sorted in ascending order, and
61 the default label as the last element in the vector. Before expanding
62 to RTL, we transform this vector into a list linked via the RIGHT
63 fields in the case_node struct. Nodes with higher case values are
66 Switch statements can be output in three forms. A branch table is
67 used if there are more than a few labels and the labels are dense
68 within the range between the smallest and largest case value. If a
69 branch table is used, no further manipulations are done with the case
72 The alternative to the use of a branch table is to generate a series
73 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
74 and PARENT fields to hold a binary tree. Initially the tree is
75 totally unbalanced, with everything on the right. We balance the tree
76 with nodes on the left having lower case values than the parent
77 and nodes on the right having higher values. We then output the tree
80 For very small, suitable switch statements, we can generate a series
81 of simple bit test and branches instead. */
/* One case label (or a LOW..HIGH range) of a switch statement.  Nodes are
   first chained via RIGHT, then rebalanced into a binary tree using the
   LEFT/RIGHT/PARENT links (see the overview comment above).  */
85 struct case_node *left; /* Left son in binary tree */
86 struct case_node *right; /* Right son in binary tree; also node chain */
87 struct case_node *parent; /* Parent of node in binary tree */
88 tree low; /* Lowest index value for this label */
89 tree high; /* Highest index value for this label */
90 tree code_label; /* Label to jump to when node matches */
93 typedef struct case_node case_node;
94 typedef struct case_node *case_node_ptr;
96 /* These are used by estimate_case_costs and balance_case_nodes. */
98 /* This must be a signed type, and non-ANSI compilers lack signed char. */
/* 129 entries: the +1 bias in COST_TABLE below maps indices -1..127 onto
   slots 0..128, so index -1 is representable.  */
static short cost_table_[129];
/* NOTE(review): flags controlling lazy use/initialization of the table;
   set elsewhere (presumably in estimate_case_costs) -- confirm.  */
static int use_cost_table;
static int cost_table_initialized;
103 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
105 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
/* Forward declarations of the file-local helpers defined below.  The first
   group supports asm-statement expansion; the case_node group supports
   switch-statement expansion.  */
107 static int n_occurrences (int, const char *);
108 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
109 static void expand_nl_goto_receiver (void);
110 static bool check_operand_nalternatives (tree, tree);
111 static bool check_unique_operand_names (tree, tree);
112 static char *resolve_operand_name_1 (char *, tree, tree);
113 static void expand_null_return_1 (void);
114 static void expand_value_return (rtx);
115 static int estimate_case_costs (case_node_ptr);
116 static bool lshift_cheap_p (void);
117 static int case_bit_test_cmp (const void *, const void *);
118 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
119 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
120 static int node_has_low_bound (case_node_ptr, tree);
121 static int node_has_high_bound (case_node_ptr, tree);
122 static int node_is_bounded (case_node_ptr, tree);
123 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
124 static struct case_node *add_case_node (struct case_node *, tree,
125 tree, tree, tree, alloc_pool);
128 /* Return the rtx-label that corresponds to a LABEL_DECL,
129 creating it if necessary. */
/* Return the CODE_LABEL rtx stored in LABEL's DECL_RTL, creating and
   caching one on first use.  LABEL must be a LABEL_DECL.  Labels that are
   forced or nonlocal are marked LABEL_PRESERVE_P so later passes keep them.  */
132 label_rtx (tree label)
134 gcc_assert (TREE_CODE (label) == LABEL_DECL);
136 if (!DECL_RTL_SET_P (label))
138 rtx r = gen_label_rtx ();
139 SET_DECL_RTL (label, r);
140 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
141 LABEL_PRESERVE_P (r) = 1;
144 return DECL_RTL (label);
147 /* As above, but also put it on the forced-reference list of the
148 function that contains it. */
/* Like label_rtx, but additionally record the label on the forced-labels
   list of the function that contains it, so it is never deleted.  If the
   label belongs to an enclosing function, look up that function's data.  */
150 force_label_rtx (tree label)
152 rtx ref = label_rtx (label);
153 tree function = decl_function_context (label);
156 gcc_assert (function);
158 if (function != current_function_decl)
159 p = find_function_data (function);
163 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
167 /* Add an unconditional jump to LABEL as the next sequential instruction. */
/* Emit an unconditional jump to LABEL, first flushing any pending stack
   adjustment so the stack pointer is consistent at the jump target.  */
170 emit_jump (rtx label)
172 do_pending_stack_adjust ();
173 emit_jump_insn (gen_jump (label));
177 /* Emit code to jump to the address
178 specified by the pointer expression EXP. */
/* Expand a computed goto: evaluate pointer expression EXP, convert the
   result to Pmode, and emit an indirect jump to that address.  */
181 expand_computed_goto (tree exp)
183 rtx x = expand_normal (exp);
185 x = convert_memory_address (Pmode, x);
187 do_pending_stack_adjust ();
188 emit_indirect_jump (x);
191 /* Handle goto statements and the labels that they can go to. */
193 /* Specify the location in the RTL code of a label LABEL,
194 which is a LABEL_DECL tree node.
196 This is used for the kind of label that the user can jump to with a
197 goto statement, and for alternatives of a switch or case statement.
198 RTL labels generated for loops and conditionals don't go through here;
199 they are generated directly at the RTL level, by other functions below.
201 Note that this has nothing to do with defining label *names*.
202 Languages vary in how they do that and what that even means. */
205 expand_label (tree label)
207 rtx label_r = label_rtx (label);
209 do_pending_stack_adjust ();
/* Place the CODE_LABEL in the insn stream and copy the source-level name
   (if any) onto it for debugging/dump purposes.  */
210 emit_label (label_r);
211 if (DECL_NAME (label))
212 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal label is a nonlocal-goto target: emit receiver code and
   register it on the handler-labels list.  */
214 if (DECL_NONLOCAL (label))
216 expand_nl_goto_receiver ();
217 nonlocal_goto_handler_labels
218 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
219 nonlocal_goto_handler_labels);
/* Forced labels (e.g. address-taken) must survive optimization.  */
222 if (FORCED_LABEL (label))
223 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
225 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
226 maybe_set_first_label_num (label_r);
229 /* Generate RTL code for a `goto' statement with target label LABEL.
230 LABEL should be a LABEL_DECL tree node that was or will later be
231 defined with `expand_label'. */
234 expand_goto (tree label)
236 #ifdef ENABLE_CHECKING
237 /* Check for a nonlocal goto to a containing function. Should have
238 gotten translated to __builtin_nonlocal_goto. */
239 tree context = decl_function_context (label);
240 gcc_assert (!context || context == current_function_decl);
/* A local goto is just an unconditional jump to the label's rtx.  */
243 emit_jump (label_rtx (label));
246 /* Return the number of times character C occurs in string S. */
/* NOTE(review): body elided in this excerpt; used below to count ','
   separators when comparing constraint alternative counts.  */
248 n_occurrences (int c, const char *s)
256 /* Generate RTL for an asm statement (explicit assembler code).
257 STRING is a STRING_CST node containing the assembler code text,
258 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
259 insn is volatile; don't optimize it. */
262 expand_asm_loc (tree string, int vol, location_t locus)
/* STRING may be wrapped in an ADDR_EXPR; unwrap to the STRING_CST.  */
266 if (TREE_CODE (string) == ADDR_EXPR)
267 string = TREE_OPERAND (string, 0);
/* Build a located ASM_INPUT rtx from a GC-duplicated copy of the text and
   propagate the volatility flag so optimizers leave it alone.  */
269 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
270 ggc_strdup (TREE_STRING_POINTER (string)),
273 MEM_VOLATILE_P (body) = vol;
278 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
279 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
280 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
281 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
282 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
283 constraint allows the use of a register operand. And, *IS_INOUT
284 will be true if the operand is read-write, i.e., if it is used as
285 an input as well as an output. If *CONSTRAINT_P is not in
286 canonical form, it will be made canonical. (Note that `+' will be
287 replaced with `=' as part of this process.)
289 Returns TRUE if all went well; FALSE if an error occurred. */
292 parse_output_constraint (const char **constraint_p, int operand_num,
293 int ninputs, int noutputs, bool *allows_mem,
294 bool *allows_reg, bool *is_inout)
296 const char *constraint = *constraint_p;
299 /* Assume the constraint doesn't allow the use of either a register
304 /* Allow the `=' or `+' to not be at the beginning of the string,
305 since it wasn't explicitly documented that way, and there is a
306 large body of code that puts it last. Swap the character to
307 the front, so as not to uglify any place else. */
308 p = strchr (constraint, '=');
310 p = strchr (constraint, '+');
312 /* If the string doesn't contain an `=', issue an error
316 error ("output operand constraint lacks %<=%>");
320 /* If the constraint begins with `+', then the operand is both read
321 from and written to. */
322 *is_inout = (*p == '+');
324 /* Canonicalize the output constraint so that it begins with `='. */
325 if (p != constraint || *is_inout)
328 size_t c_len = strlen (constraint);
331 warning (0, "output constraint %qc for operand %d "
332 "is not at the beginning",
335 /* Make a copy of the constraint. */
336 buf = XALLOCAVEC (char, c_len + 1);
337 strcpy (buf, constraint);
338 /* Swap the first character and the `=' or `+'. */
339 buf[p - constraint] = buf[0];
340 /* Make sure the first character is an `='. (Until we do this,
341 it might be a `+'.) */
343 /* Replace the constraint with the canonicalized string. */
344 *constraint_p = ggc_alloc_string (buf, c_len);
345 constraint = *constraint_p;
348 /* Loop through the constraint string. */
/* Each letter past the leading `=' classifies what the operand allows;
   CONSTRAINT_LEN handles multi-character constraints.  */
349 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
354 error ("operand constraint contains incorrectly positioned "
359 if (operand_num + 1 == ninputs + noutputs)
361 error ("%<%%%> constraint used with last operand");
366 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
370 case '?': case '!': case '*': case '&': case '#':
371 case 'E': case 'F': case 'G': case 'H':
372 case 's': case 'i': case 'n':
373 case 'I': case 'J': case 'K': case 'L': case 'M':
374 case 'N': case 'O': case 'P': case ',':
/* Digits are matching constraints -- meaningless on an output.  */
377 case '0': case '1': case '2': case '3': case '4':
378 case '5': case '6': case '7': case '8': case '9':
380 error ("matching constraint not valid in output operand");
384 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
385 excepting those that expand_call created. So match memory
402 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
404 #ifdef EXTRA_CONSTRAINT_STR
405 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
407 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
411 /* Otherwise we can't assume anything about the nature of
412 the constraint except that it isn't purely registers.
413 Treat it like "g" and hope for the best. */
424 /* Similar, but for input constraints. */
/* Parse the input constraint *CONSTRAINT_P for input operand INPUT_NUM;
   like parse_output_constraint but inputs forbid `=', `+' and `&', and a
   digit is a matching constraint referring back to an output operand.  */
427 parse_input_constraint (const char **constraint_p, int input_num,
428 int ninputs, int noutputs, int ninout,
429 const char * const * constraints,
430 bool *allows_mem, bool *allows_reg)
432 const char *constraint = *constraint_p;
433 const char *orig_constraint = constraint;
434 size_t c_len = strlen (constraint);
436 bool saw_match = false;
438 /* Assume the constraint doesn't allow the use of either
439 a register or memory. */
443 /* Make sure constraint has neither `=', `+', nor '&'. */
445 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
446 switch (constraint[j])
448 case '+': case '=': case '&':
/* Only complain on the original string, not on a constraint we
   substituted for a matching dup below.  */
449 if (constraint == orig_constraint)
451 error ("input operand constraint contains %qc", constraint[j])2;
457 if (constraint == orig_constraint
458 && input_num + 1 == ninputs - ninout)
460 error ("%<%%%> constraint used with last operand");
465 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
470 case '?': case '!': case '*': case '#':
471 case 'E': case 'F': case 'G': case 'H':
472 case 's': case 'i': case 'n':
473 case 'I': case 'J': case 'K': case 'L': case 'M':
474 case 'N': case 'O': case 'P': case ',':
477 /* Whether or not a numeric constraint allows a register is
478 decided by the matching constraint, and so there is no need
479 to do anything special with them. We must handle them in
480 the default case, so that we don't unnecessarily force
481 operands to memory. */
482 case '0': case '1': case '2': case '3': case '4':
483 case '5': case '6': case '7': case '8': case '9':
490 match = strtoul (constraint + j, &end, 10);
491 if (match >= (unsigned long) noutputs)
493 error ("matching constraint references invalid operand number");
497 /* Try and find the real constraint for this dup. Only do this
498 if the matching constraint is the only alternative. */
500 && (j == 0 || (j == 1 && constraint[0] == '%')))
502 constraint = constraints[match];
503 *constraint_p = constraint;
504 c_len = strlen (constraint);
506 /* ??? At the end of the loop, we will skip the first part of
507 the matched constraint. This assumes not only that the
508 other constraint is an output constraint, but also that
509 the '=' or '+' come first. */
513 j = end - constraint;
514 /* Anticipate increment at end of loop. */
529 if (! ISALPHA (constraint[j]))
531 error ("invalid punctuation %qc in constraint", constraint[j]);
534 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
537 #ifdef EXTRA_CONSTRAINT_STR
538 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
540 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
544 /* Otherwise we can't assume anything about the nature of
545 the constraint except that it isn't purely registers.
546 Treat it like "g" and hope for the best. */
554 if (saw_match && !*allows_reg)
555 warning (0, "matching constraint does not allow a register");
560 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
561 can be an asm-declared register. Called via walk_tree. */
/* walk_tree callback: DATA is a HARD_REG_SET*.  If *DECLP is a VAR_DECL
   asm-bound to a hard register that overlaps the set, return it (stopping
   the walk).  Types and PARM_DECLs need not be walked into.  */
564 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
568 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
570 if (TREE_CODE (decl) == VAR_DECL)
572 if (DECL_HARD_REGISTER (decl)
573 && REG_P (DECL_RTL (decl))
574 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
576 rtx reg = DECL_RTL (decl);
578 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
583 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
588 /* If there is an overlap between *REGS and DECL, return the first overlap
/* Walk DECL looking for the first asm-declared register variable that
   overlaps *REGS; return it, or NULL_TREE if none.  */
591 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
593 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
596 /* Check for overlap between registers marked in CLOBBERED_REGS and
597 anything inappropriate in T. Emit error and return the register
598 variable definition for error, NULL_TREE for ok. */
601 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
603 /* Conflicts between asm-declared register variables and the clobber
604 list are not allowed. */
605 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
609 error ("asm-specifier for variable %qs conflicts with asm clobber list",
610 IDENTIFIER_POINTER (DECL_NAME (overlap)));
612 /* Reset registerness to stop multiple errors emitted for a single
/* Clearing DECL_REGISTER makes later checks treat the variable as
   an ordinary one, so the error fires only once per variable.  */
614 DECL_REGISTER (overlap) = 0;
621 /* Generate RTL for an asm statement with arguments.
622 STRING is the instruction template.
623 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
624 Each output or input has an expression in the TREE_VALUE and
625 a tree list in TREE_PURPOSE which in turn contains a constraint
626 name in TREE_VALUE (or NULL_TREE) and a constraint string
628 CLOBBERS is a list of STRING_CST nodes each naming a hard register
629 that is clobbered by this insn.
631 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
632 Some elements of OUTPUTS may be replaced with trees representing temporary
633 values. The caller should copy those temporary values to the originally
636 VOL nonzero means the insn is volatile; don't optimize it. */
639 expand_asm_operands (tree string, tree outputs, tree inputs,
640 tree clobbers, int vol, location_t locus)
642 rtvec argvec, constraintvec;
644 int ninputs = list_length (inputs);
645 int noutputs = list_length (outputs);
648 HARD_REG_SET clobbered_regs;
649 int clobber_conflict_found = 0;
653 /* Vector of RTX's of evaluated output operands. */
654 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
655 int *inout_opnum = XALLOCAVEC (int, noutputs);
656 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
657 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
658 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
659 int old_generating_concat_p = generating_concat_p;
661 /* An ASM with no outputs needs to be treated as volatile, for now. */
665 if (! check_operand_nalternatives (outputs, inputs))
668 string = resolve_asm_operand_names (string, outputs, inputs);
670 /* Collect constraints. */
672 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
673 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
674 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
675 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
677 /* Sometimes we wish to automatically clobber registers across an asm.
678 Case in point is when the i386 backend moved from cc0 to a hard reg --
679 maintaining source-level compatibility means automatically clobbering
680 the flags register. */
681 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
683 /* Count the number of meaningful clobbered registers, ignoring what
684 we would ignore later. */
686 CLEAR_HARD_REG_SET (clobbered_regs);
687 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
691 if (TREE_VALUE (tail) == error_mark_node)
693 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
695 i = decode_reg_name (regname);
696 if (i >= 0 || i == -4)
699 error ("unknown register name %qs in %<asm%>", regname);
701 /* Mark clobbered registers. */
704 /* Clobbering the PIC register is an error. */
705 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
707 error ("PIC register %qs clobbered in %<asm%>", regname);
711 SET_HARD_REG_BIT (clobbered_regs, i);
715 /* First pass over inputs and outputs checks validity and sets
716 mark_addressable if needed. */
719 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
721 tree val = TREE_VALUE (tail);
722 tree type = TREE_TYPE (val);
723 const char *constraint;
728 /* If there's an erroneous arg, emit no insn. */
729 if (type == error_mark_node)
732 /* Try to parse the output constraint. If that fails, there's
733 no point in going further. */
734 constraint = constraints[i];
735 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
736 &allows_mem, &allows_reg, &is_inout))
743 && REG_P (DECL_RTL (val))
744 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
745 lang_hooks.mark_addressable (val);
752 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
754 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
758 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
760 bool allows_reg, allows_mem;
761 const char *constraint;
763 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
764 would get VOIDmode and that could cause a crash in reload. */
765 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
768 constraint = constraints[i + noutputs];
769 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
770 constraints, &allows_mem, &allows_reg))
773 if (! allows_reg && allows_mem)
774 lang_hooks.mark_addressable (TREE_VALUE (tail));
777 /* Second pass evaluates arguments. */
780 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
782 tree val = TREE_VALUE (tail);
783 tree type = TREE_TYPE (val);
790 ok = parse_output_constraint (&constraints[i], i, ninputs,
791 noutputs, &allows_mem, &allows_reg,
795 /* If an output operand is not a decl or indirect ref and our constraint
796 allows a register, make a temporary to act as an intermediate.
797 Make the asm insn write into that, then our caller will copy it to
798 the real output operand. Likewise for promoted variables. */
800 generating_concat_p = 0;
802 real_output_rtx[i] = NULL_RTX;
803 if ((TREE_CODE (val) == INDIRECT_REF
806 && (allows_mem || REG_P (DECL_RTL (val)))
807 && ! (REG_P (DECL_RTL (val))
808 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
812 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
814 op = validize_mem (op);
816 if (! allows_reg && !MEM_P (op))
817 error ("output number %d not directly addressable", i);
818 if ((! allows_mem && MEM_P (op))
819 || GET_CODE (op) == CONCAT)
/* Remember the real destination; the asm writes a fresh pseudo and
   the epilogue below copies it back (real_output_rtx spill loop).  */
821 real_output_rtx[i] = op;
822 op = gen_reg_rtx (GET_MODE (op));
824 emit_move_insn (op, real_output_rtx[i]);
829 op = assign_temp (type, 0, 0, 1);
830 op = validize_mem (op);
831 TREE_VALUE (tail) = make_tree (type, op);
835 generating_concat_p = old_generating_concat_p;
839 inout_mode[ninout] = TYPE_MODE (type);
840 inout_opnum[ninout++] = i;
843 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
844 clobber_conflict_found = 1;
847 /* Make vectors for the expression-rtx, constraint strings,
848 and named operands. */
850 argvec = rtvec_alloc (ninputs);
851 constraintvec = rtvec_alloc (ninputs);
853 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
854 : GET_MODE (output_rtx[0])),
855 ggc_strdup (TREE_STRING_POINTER (string)),
856 empty_string, 0, argvec, constraintvec,
859 MEM_VOLATILE_P (body) = vol;
861 /* Eval the inputs and put them into ARGVEC.
862 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
864 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
866 bool allows_reg, allows_mem;
867 const char *constraint;
872 constraint = constraints[i + noutputs];
873 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
874 constraints, &allows_mem, &allows_reg);
877 generating_concat_p = 0;
879 val = TREE_VALUE (tail);
880 type = TREE_TYPE (val);
881 /* EXPAND_INITIALIZER will not generate code for valid initializer
882 constants, but will still generate code for other types of operand.
883 This is the behavior we want for constant constraints. */
884 op = expand_expr (val, NULL_RTX, VOIDmode,
885 allows_reg ? EXPAND_NORMAL
886 : allows_mem ? EXPAND_MEMORY
887 : EXPAND_INITIALIZER);
889 /* Never pass a CONCAT to an ASM. */
890 if (GET_CODE (op) == CONCAT)
891 op = force_reg (GET_MODE (op), op);
893 op = validize_mem (op);
895 if (asm_operand_ok (op, constraint) <= 0)
897 if (allows_reg && TYPE_MODE (type) != BLKmode)
898 op = force_reg (TYPE_MODE (type), op);
899 else if (!allows_mem)
900 warning (0, "asm operand %d probably doesn%'t match constraints",
904 /* We won't recognize either volatile memory or memory
905 with a queued address as available a memory_operand
906 at this point. Ignore it: clearly this *is* a memory. */
910 warning (0, "use of memory input without lvalue in "
911 "asm operand %d is deprecated", i + noutputs);
915 rtx mem = force_const_mem (TYPE_MODE (type), op);
917 op = validize_mem (mem);
919 op = force_reg (TYPE_MODE (type), op);
922 || GET_CODE (op) == SUBREG
923 || GET_CODE (op) == CONCAT)
925 tree qual_type = build_qualified_type (type,
928 rtx memloc = assign_temp (qual_type, 1, 1, 1);
929 memloc = validize_mem (memloc);
930 emit_move_insn (memloc, op);
936 generating_concat_p = old_generating_concat_p;
937 ASM_OPERANDS_INPUT (body, i) = op;
939 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
940 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
941 ggc_strdup (constraints[i + noutputs]));
943 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
944 clobber_conflict_found = 1;
947 /* Protect all the operands from the queue now that they have all been
950 generating_concat_p = 0;
952 /* For in-out operands, copy output rtx to input rtx. */
/* The implicit inputs for "+" operands occupy the tail of the input
   vector; their constraint is the matching output's index as a digit.  */
953 for (i = 0; i < ninout; i++)
955 int j = inout_opnum[i];
958 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
961 sprintf (buffer, "%d", j);
962 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
963 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
966 generating_concat_p = old_generating_concat_p;
968 /* Now, for each output, construct an rtx
969 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
970 ARGVEC CONSTRAINTS OPNAMES))
971 If there is more than one, put them inside a PARALLEL. */
973 if (noutputs == 1 && nclobbers == 0)
975 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
976 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
979 else if (noutputs == 0 && nclobbers == 0)
981 /* No output operands: put in a raw ASM_OPERANDS rtx. */
993 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
995 /* For each output operand, store a SET. */
996 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
999 = gen_rtx_SET (VOIDmode,
1001 gen_rtx_ASM_OPERANDS
1002 (GET_MODE (output_rtx[i]),
1003 ggc_strdup (TREE_STRING_POINTER (string)),
1004 ggc_strdup (constraints[i]),
1005 i, argvec, constraintvec, locus));
1007 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1010 /* If there are no outputs (but there are some clobbers)
1011 store the bare ASM_OPERANDS into the PARALLEL. */
1014 XVECEXP (body, 0, i++) = obody;
1016 /* Store (clobber REG) for each clobbered register specified. */
1018 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1020 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1021 int j = decode_reg_name (regname);
1026 if (j == -3) /* `cc', which is not a register */
1029 if (j == -4) /* `memory', don't cache memory across asm */
1031 XVECEXP (body, 0, i++)
1032 = gen_rtx_CLOBBER (VOIDmode,
1035 gen_rtx_SCRATCH (VOIDmode)));
1039 /* Ignore unknown register, error already signaled. */
1043 /* Use QImode since that's guaranteed to clobber just one reg. */
1044 clobbered_reg = gen_rtx_REG (QImode, j);
1046 /* Do sanity check for overlap between clobbers and respectively
1047 input and outputs that hasn't been handled. Such overlap
1048 should have been detected and reported above. */
1049 if (!clobber_conflict_found)
1053 /* We test the old body (obody) contents to avoid tripping
1054 over the under-construction body. */
1055 for (opno = 0; opno < noutputs; opno++)
1056 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1057 internal_error ("asm clobber conflict with output operand");
1059 for (opno = 0; opno < ninputs - ninout; opno++)
1060 if (reg_overlap_mentioned_p (clobbered_reg,
1061 ASM_OPERANDS_INPUT (obody, opno)))
1062 internal_error ("asm clobber conflict with input operand");
1065 XVECEXP (body, 0, i++)
1066 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1072 /* For any outputs that needed reloading into registers, spill them
1073 back to where they belong. */
1074 for (i = 0; i < noutputs; ++i)
1075 if (real_output_rtx[i])
1076 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1078 crtl->has_asm_statement = 1;
/* Expand an ASM_EXPR tree EXP: a simple asm goes straight to
   expand_asm_loc; an extended asm goes through expand_asm_operands, with
   post-processing to copy any intermediate output temporaries back into
   the user's actual output lvalues.  */
1083 expand_asm_expr (tree exp)
1089 if (ASM_INPUT_P (exp))
1091 expand_asm_loc (ASM_STRING (exp), ASM_VOLATILE_P (exp), input_location);
1095 outputs = ASM_OUTPUTS (exp);
1096 noutputs = list_length (outputs);
1097 /* o[I] is the place that output number I should be written. */
1098 o = (tree *) alloca (noutputs * sizeof (tree));
1100 /* Record the contents of OUTPUTS before it is modified. */
1101 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1102 o[i] = TREE_VALUE (tail);
1104 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1105 OUTPUTS some trees for where the values were actually stored. */
1106 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1107 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1110 /* Copy all the intermediate outputs into the specified outputs. */
1111 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1113 if (o[i] != TREE_VALUE (tail))
1115 expand_assignment (o[i], TREE_VALUE (tail), false);
1118 /* Restore the original value so that it's correct the next
1119 time we expand this function. */
1120 TREE_VALUE (tail) = o[i];
1125 /* A subroutine of expand_asm_operands. Check that all operands have
1126 the same number of alternatives. Return true if so. */
1129 check_operand_nalternatives (tree outputs, tree inputs)
1131 if (outputs || inputs)
/* The alternative count of an operand is 1 + the number of commas in
   its constraint; the first operand sets the expected count.  */
1133 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1135 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1138 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1140 error ("too many alternatives in %<asm%>");
1147 const char *constraint
1148 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1150 if (n_occurrences (',', constraint) != nalternatives)
1152 error ("operand constraints for %<asm%> differ "
1153 "in number of alternatives");
/* Walk outputs first, then chain over to the inputs list.  */
1157 if (TREE_CHAIN (tmp))
1158 tmp = TREE_CHAIN (tmp);
1160 tmp = next, next = 0;
1167 /* A subroutine of expand_asm_operands. Check that all operand names
1168 are unique. Return true if so. We rely on the fact that these names
1169 are identifiers, and so have been canonicalized by get_identifier,
1170 so all we need are pointer comparisons. */
1173 check_unique_operand_names (tree outputs, tree inputs)
/* Compare every named operand against all later outputs, and every
   named input against later inputs and all outputs.  */
1177 for (i = outputs; i ; i = TREE_CHAIN (i))
1179 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1183 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1184 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1188 for (i = inputs; i ; i = TREE_CHAIN (i))
1190 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1194 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1195 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1197 for (j = outputs; j ; j = TREE_CHAIN (j))
1198 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1205 error ("duplicate asm operand name %qs",
1206 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1210 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1211 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1212 STRING and in the constraints to those numbers. */
1215 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1222 check_unique_operand_names (outputs, inputs);
1224 /* Substitute [<name>] in input constraint strings. There should be no
1225 named operands in output constraints. */
1226 for (t = inputs; t ; t = TREE_CHAIN (t))
1228 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1229 if (strchr (c, '[') != NULL)
/* Work on a malloc'd copy, then rebuild the STRING_CST.  */
1231 p = buffer = xstrdup (c);
1232 while ((p = strchr (p, '[')) != NULL)
1233 p = resolve_operand_name_1 (p, outputs, inputs);
1234 TREE_VALUE (TREE_PURPOSE (t))
1235 = build_string (strlen (buffer), buffer);
1240 /* Now check for any needed substitutions in the template. */
1241 c = TREE_STRING_POINTER (string);
1242 while ((c = strchr (c, '%')) != NULL)
1246 else if (ISALPHA (c[1]) && c[2] == '[')
1257 /* OK, we need to make a copy so we can perform the substitutions.
1258 Assume that we will not need extra space--we get to remove '['
1259 and ']', which means we cannot have a problem until we have more
1260 than 999 operands. */
1261 buffer = xstrdup (TREE_STRING_POINTER (string));
1262 p = buffer + (c - TREE_STRING_POINTER (string));
1264 while ((p = strchr (p, '%')) != NULL)
1268 else if (ISALPHA (p[1]) && p[2] == '[')
1276 p = resolve_operand_name_1 (p, outputs, inputs);
1279 string = build_string (strlen (buffer), buffer);
1286 /* A subroutine of resolve_operand_names. P points to the '[' for a
1287 potential named operand of the form [<name>]. In place, replace
1288 the name and brackets with a number. Return a pointer to the
1289 balance of the string after substitution. */
/* NOTE(review): elided listing -- declarations of Q, T, OP, LEN and the
   final "return p;" are on lines not shown. */
1292 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1299 /* Collect the operand name. */
1300 q = strchr (p, ']');
1303 error ("missing close brace for named operand")
1304 return strchr (p, '\0');
1308 /* Resolve the name to a number. */
/* Outputs are numbered before inputs, so OP keeps counting across both
   lists; the first exact match wins. */
1309 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1311 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1314 const char *c = TREE_STRING_POINTER (name);
1315 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1319 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1321 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1324 const char *c = TREE_STRING_POINTER (name);
1325 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1331 error ("undefined named operand %qs", p + 1);
1335 /* Replace the name with the number. Unfortunately, not all libraries
1336 get the return value of sprintf correct, so search for the end of the
1337 generated string by hand. */
1338 sprintf (p, "%d", op);
1339 p = strchr (p, '\0');
1341 /* Verify the no extra buffer space assumption. */
1342 gcc_assert (p <= q);
1344 /* Shift the rest of the buffer down to fill the gap. */
1345 memmove (p, q + 1, strlen (q + 1) + 1);
1350 /* Generate RTL to evaluate the expression EXP. */
1353 expand_expr_stmt (tree exp)
1358 value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* GIMPLE tuples carry no tree type; treat the statement as void. */
1359 if (GIMPLE_TUPLE_P (exp))
1360 type = void_type_node;
1362 type = TREE_TYPE (exp);
1364 /* If all we do is reference a volatile value in memory,
1365 copy it to a register to be sure it is actually touched. */
1366 if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
/* VOIDmode: nothing to load.  Non-BLKmode: a plain register copy
   suffices.  BLKmode falls through to the self-comparison below. */
1368 if (TYPE_MODE (type) == VOIDmode)
1370 else if (TYPE_MODE (type) != BLKmode)
1371 value = copy_to_reg (value);
1374 rtx lab = gen_label_rtx ();
1376 /* Compare the value with itself to reference it. */
1377 emit_cmp_and_jump_insns (value, value, EQ,
1378 expand_normal (TYPE_SIZE (type)),
1384 /* Free any temporaries used to evaluate this expression. */
1388 /* Warn if EXP contains any computations whose results are not used.
1389 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1390 (potential) location of the expression. */
/* NOTE(review): elided listing -- the "restart:" label this switch loops
   back to, several case labels and the return statements are on lines
   not shown.  The switch strips wrappers and retries on the inner
   expression by reassigning EXP. */
1393 warn_if_unused_value (const_tree exp, location_t locus)
1396 if (TREE_USED (exp) || TREE_NO_WARNING (exp))
1399 /* Don't warn about void constructs. This includes casting to void,
1400 void function calls, and statement expressions with a final cast
1402 if (VOID_TYPE_P (TREE_TYPE (exp)))
/* Prefer the expression's own location over the caller-supplied one. */
1405 if (EXPR_HAS_LOCATION (exp))
1406 locus = EXPR_LOCATION (exp);
1408 switch (TREE_CODE (exp))
1410 case PREINCREMENT_EXPR:
1411 case POSTINCREMENT_EXPR:
1412 case PREDECREMENT_EXPR:
1413 case POSTDECREMENT_EXPR:
1415 case GIMPLE_MODIFY_STMT:
1419 case TRY_CATCH_EXPR:
1420 case WITH_CLEANUP_EXPR:
1426 /* For a binding, warn if no side effect within it. */
1427 exp = BIND_EXPR_BODY (exp);
1431 exp = TREE_OPERAND (exp, 0);
1434 case TRUTH_ORIF_EXPR:
1435 case TRUTH_ANDIF_EXPR:
1436 /* In && or ||, warn if 2nd operand has no side effect. */
1437 exp = TREE_OPERAND (exp, 1);
/* COMPOUND_EXPR (case label elided): warn about either operand. */
1441 if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1443 /* Let people do `(foo (), 0)' without a warning. */
1444 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1446 exp = TREE_OPERAND (exp, 1);
1450 /* If this is an expression with side effects, don't warn; this
1451 case commonly appears in macro expansions. */
1452 if (TREE_SIDE_EFFECTS (exp))
1457 /* Don't warn about automatic dereferencing of references, since
1458 the user cannot control it. */
1459 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1461 exp = TREE_OPERAND (exp, 0);
1467 /* Referencing a volatile value is a side effect, so don't warn. */
1468 if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
1469 && TREE_THIS_VOLATILE (exp))
1472 /* If this is an expression which has no operands, there is no value
1473 to be unused. There are no such language-independent codes,
1474 but front ends may define such. */
1475 if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
1479 warning (OPT_Wunused_value, "%Hvalue computed is not used", &locus);
1485 /* Generate RTL to return from the current function, with no value.
1486 (That is, we do not do anything about returning any value.) */
1489 expand_null_return (void)
1491 /* If this function was declared to return a value, but we
1492 didn't, clobber the return registers so that they are not
1493 propagated live to the rest of the function. */
1494 clobber_return_register ();
/* Shared epilogue: jump to the function's return label. */
1496 expand_null_return_1 ();
1499 /* Generate RTL to return directly from the current function.
1500 (That is, we bypass any return value.) */
1503 expand_naked_return (void)
1507 clear_pending_stack_adjust ();
1508 do_pending_stack_adjust ();
/* Lazily create the shared naked-return label on first use. */
1510 end_label = naked_return_label;
1512 end_label = naked_return_label = gen_label_rtx ();
1514 emit_jump (end_label);
1517 /* Generate RTL to return from the current function, with value VAL. */
1520 expand_value_return (rtx val)
1522 /* Copy the value to the return location
1523 unless it's already there. */
1525 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1526 if (return_reg != val)
1528 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* If the target's ABI promotes small return values, widen VAL to the
   promoted mode of the result decl before the copy. */
1529 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1531 int unsignedp = TYPE_UNSIGNED (type);
1532 enum machine_mode old_mode
1533 = DECL_MODE (DECL_RESULT (current_function_decl));
1534 enum machine_mode mode
1535 = promote_mode (type, old_mode, &unsignedp, 1);
1537 if (mode != old_mode)
1538 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location means the value is split across several
   registers; use a group load instead of a single move. */
1540 if (GET_CODE (return_reg) == PARALLEL)
1541 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1543 emit_move_insn (return_reg, val);
1546 expand_null_return_1 ();
1549 /* Output a return with no value. */
1552 expand_null_return_1 (void)
1554 clear_pending_stack_adjust ();
1555 do_pending_stack_adjust ();
/* All returns funnel through the single per-function return label. */
1556 emit_jump (return_label);
1559 /* Generate RTL to evaluate the expression RETVAL and return it
1560 from the current function. */
/* NOTE(review): elided listing -- declarations (retval_rhs, result_rtl,
   i, val, ...), braces and some statements are on lines not shown. */
1563 expand_return (tree retval)
1569 /* If function wants no value, give it none. */
1570 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1572 expand_normal (retval);
1573 expand_null_return ();
1577 if (retval == error_mark_node)
1579 /* Treat this like a return of no value from a function that
1581 expand_null_return ();
/* Peel "RESULT_DECL = rhs" assignments down to the right-hand side. */
1584 else if ((TREE_CODE (retval) == GIMPLE_MODIFY_STMT
1585 || TREE_CODE (retval) == INIT_EXPR)
1586 && TREE_CODE (GENERIC_TREE_OPERAND (retval, 0)) == RESULT_DECL)
1587 retval_rhs = GENERIC_TREE_OPERAND (retval, 1);
1589 retval_rhs = retval;
1591 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1593 /* If we are returning the RESULT_DECL, then the value has already
1594 been stored into it, so we don't have to do anything special. */
1595 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1596 expand_value_return (result_rtl);
1598 /* If the result is an aggregate that is being returned in one (or more)
1599 registers, load the registers here. The compiler currently can't handle
1600 copying a BLKmode value into registers. We could put this code in a
1601 more general area (for use by everyone instead of just function
1602 call/return), but until this feature is generally usable it is kept here
1603 (and in expand_call). */
1605 else if (retval_rhs != 0
1606 && TYPE_MODE (GENERIC_TREE_TYPE (retval_rhs)) == BLKmode
1607 && REG_P (result_rtl))
1610 unsigned HOST_WIDE_INT bitpos, xbitpos;
1611 unsigned HOST_WIDE_INT padding_correction = 0;
1612 unsigned HOST_WIDE_INT bytes
1613 = int_size_in_bytes (TREE_TYPE (retval_rhs));
1614 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1615 unsigned int bitsize
1616 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1617 rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
1618 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1619 rtx result_val = expand_normal (retval_rhs);
1620 enum machine_mode tmpmode, result_reg_mode;
1624 expand_null_return ();
1628 /* If the structure doesn't take up a whole number of words, see
1629 whether the register value should be padded on the left or on
1630 the right. Set PADDING_CORRECTION to the number of padding
1631 bits needed on the left side.
1633 In most ABIs, the structure will be returned at the least end of
1634 the register, which translates to right padding on little-endian
1635 targets and left padding on big-endian targets. The opposite
1636 holds if the structure is returned at the most significant
1637 end of the register. */
1638 if (bytes % UNITS_PER_WORD != 0
1639 && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
1641 : BYTES_BIG_ENDIAN))
1642 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
1645 /* Copy the structure BITSIZE bits at a time. */
1646 for (bitpos = 0, xbitpos = padding_correction;
1647 bitpos < bytes * BITS_PER_UNIT;
1648 bitpos += bitsize, xbitpos += bitsize)
1650 /* We need a new destination pseudo each time xbitpos is
1651 on a word boundary and when xbitpos == padding_correction
1652 (the first time through). */
1653 if (xbitpos % BITS_PER_WORD == 0
1654 || xbitpos == padding_correction)
1656 /* Generate an appropriate register. */
1657 dst = gen_reg_rtx (word_mode);
1658 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
1660 /* Clear the destination before we move anything into it. */
1661 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
1664 /* We need a new source operand each time bitpos is on a word
1666 if (bitpos % BITS_PER_WORD == 0)
1667 src = operand_subword_force (result_val,
1668 bitpos / BITS_PER_WORD,
1671 /* Use bitpos for the source extraction (left justified) and
1672 xbitpos for the destination store (right justified). */
1673 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
1674 extract_bit_field (src, bitsize,
1675 bitpos % BITS_PER_WORD, 1,
1676 NULL_RTX, word_mode, word_mode));
1679 tmpmode = GET_MODE (result_rtl);
1680 if (tmpmode == BLKmode)
1682 /* Find the smallest integer mode large enough to hold the
1683 entire structure and use that mode instead of BLKmode
1684 on the USE insn for the return register. */
1685 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1686 tmpmode != VOIDmode;
1687 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1688 /* Have we found a large enough mode? */
1689 if (GET_MODE_SIZE (tmpmode) >= bytes)
1692 /* A suitable mode should have been found. */
1693 gcc_assert (tmpmode != VOIDmode);
1695 PUT_MODE (result_rtl, tmpmode);
/* Assemble the final value in a pseudo at least a word wide, then
   narrow it to the return register's mode if necessary. */
1698 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
1699 result_reg_mode = word_mode;
1701 result_reg_mode = tmpmode;
1702 result_reg = gen_reg_rtx (result_reg_mode);
1704 for (i = 0; i < n_regs; i++)
1705 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
1708 if (tmpmode != result_reg_mode)
1709 result_reg = gen_lowpart (tmpmode, result_reg);
1711 expand_value_return (result_reg);
1713 else if (retval_rhs != 0
1714 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1715 && (REG_P (result_rtl)
1716 || (GET_CODE (result_rtl) == PARALLEL)))
1718 /* Calculate the return value into a temporary (usually a pseudo
1720 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1721 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1723 val = assign_temp (nt, 0, 0, 1);
1724 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
1725 val = force_not_mem (val);
1726 /* Return the calculated value. */
1727 expand_value_return (val);
1731 /* No hard reg used; calculate value into hard return reg. */
1732 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1733 expand_value_return (result_rtl);
1737 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
1738 in question represents the outermost pair of curly braces (i.e. the "body
1739 block") of a function or method.
1741 For any BLOCK node representing a "body block" of a function or method, the
1742 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
1743 represents the outermost (function) scope for the function or method (i.e.
1744 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
1745 *that* node in turn will point to the relevant FUNCTION_DECL node. */
1748 is_body_block (const_tree stmt)
/* Front ends without body blocks short-circuit (return elided). */
1750 if (lang_hooks.no_body_blocks)
1753 if (TREE_CODE (stmt) == BLOCK)
1755 tree parent = BLOCK_SUPERCONTEXT (stmt);
1757 if (parent && TREE_CODE (parent) == BLOCK)
1759 tree grandparent = BLOCK_SUPERCONTEXT (parent);
/* BLOCK -> BLOCK -> FUNCTION_DECL is exactly the body-block shape. */
1761 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
1769 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1772 expand_nl_goto_receiver (void)
1774 /* Clobber the FP when we get here, so we have to make sure it's
1775 marked as used by this function. */
1776 emit_use (hard_frame_pointer_rtx);
1778 /* Mark the static chain as clobbered here so life information
1779 doesn't get messed up for it. */
1780 emit_clobber (static_chain_rtx);
/* Only do the manual frame/arg pointer repair when the target has no
   dedicated nonlocal_goto pattern to do it for us. */
1782 #ifdef HAVE_nonlocal_goto
1783 if (! HAVE_nonlocal_goto)
1785 /* First adjust our frame pointer to its actual value. It was
1786 previously set to the start of the virtual area corresponding to
1787 the stacked variables when we branched here and now needs to be
1788 adjusted to the actual hardware fp value.
1790 Assignments are to virtual registers are converted by
1791 instantiate_virtual_regs into the corresponding assignment
1792 to the underlying register (fp in this case) that makes
1793 the original assignment true.
1794 So the following insn will actually be
1795 decrementing fp by STARTING_FRAME_OFFSET. */
1796 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1798 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1799 if (fixed_regs[ARG_POINTER_REGNUM])
1801 #ifdef ELIMINABLE_REGS
1802 /* If the argument pointer can be eliminated in favor of the
1803 frame pointer, we don't need to restore it. We assume here
1804 that if such an elimination is present, it can always be used.
1805 This is the case on all known machines; if we don't make this
1806 assumption, we do unnecessary saving on many machines. */
1807 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1810 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1811 if (elim_regs[i].from == ARG_POINTER_REGNUM
1812 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* No arg-pointer -> frame-pointer elimination found: reload it. */
1815 if (i == ARRAY_SIZE (elim_regs))
1818 /* Now restore our arg pointer from the address at which it
1819 was saved in our stack frame. */
1820 emit_move_insn (virtual_incoming_args_rtx,
1821 copy_to_reg (get_arg_pointer_save_area ()));
1826 #ifdef HAVE_nonlocal_goto_receiver
1827 if (HAVE_nonlocal_goto_receiver)
1828 emit_insn (gen_nonlocal_goto_receiver ());
1831 /* We must not allow the code we just generated to be reordered by
1832 scheduling. Specifically, the update of the frame pointer must
1833 happen immediately, not later. */
1834 emit_insn (gen_blockage ());
1837 /* Generate RTL for the automatic variable declaration DECL.
1838 (Other kinds of declarations are simply ignored if seen here.) */
/* NOTE(review): elided listing -- locals (type, x, oldaddr, addr, ...),
   braces and early returns are on lines not shown. */
1841 expand_decl (tree decl)
1845 type = TREE_TYPE (decl);
1847 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1848 type in case this node is used in a reference. */
1849 if (TREE_CODE (decl) == CONST_DECL)
1851 DECL_MODE (decl) = TYPE_MODE (type);
1852 DECL_ALIGN (decl) = TYPE_ALIGN (type);
1853 DECL_SIZE (decl) = TYPE_SIZE (type);
1854 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1858 /* Otherwise, only automatic variables need any expansion done. Static and
1859 external variables, and external functions, will be handled by
1860 `assemble_variable' (called from finish_decl). TYPE_DECL requires
1861 nothing. PARM_DECLs are handled in `assign_parms'. */
1862 if (TREE_CODE (decl) != VAR_DECL)
1865 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1868 /* Create the RTL representation for the variable. */
/* Error recovery: give the decl a harmless placeholder MEM. */
1870 if (type == error_mark_node)
1871 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1873 else if (DECL_SIZE (decl) == 0)
1874 /* Variable with incomplete type. */
1877 if (DECL_INITIAL (decl) == 0)
1878 /* Error message was already done; now avoid a crash. */
1879 x = gen_rtx_MEM (BLKmode, const0_rtx);
1881 /* An initializer is going to decide the size of this array.
1882 Until we know the size, represent its address with a reg. */
1883 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1885 set_mem_attributes (x, decl, 1);
1886 SET_DECL_RTL (decl, x);
1888 else if (use_register_for_decl (decl))
1890 /* Automatic variable that can go in a register. */
1891 int unsignedp = TYPE_UNSIGNED (type);
1892 enum machine_mode reg_mode
1893 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1895 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1897 /* Note if the object is a user variable. */
1898 if (!DECL_ARTIFICIAL (decl))
1899 mark_user_reg (DECL_RTL (decl));
/* Pointer pseudos also record the alignment of what they point to. */
1901 if (POINTER_TYPE_P (type))
1902 mark_reg_pointer (DECL_RTL (decl),
1903 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1906 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
1907 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
1908 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
1909 STACK_CHECK_MAX_VAR_SIZE)))
1911 /* Variable of fixed size that goes on the stack. */
1916 /* If we previously made RTL for this decl, it must be an array
1917 whose size was determined by the initializer.
1918 The old address was a register; set that register now
1919 to the proper address. */
1920 if (DECL_RTL_SET_P (decl))
1922 gcc_assert (MEM_P (DECL_RTL (decl)));
1923 gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1924 oldaddr = XEXP (DECL_RTL (decl), 0);
1927 /* Set alignment we actually gave this decl. */
1928 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1929 : GET_MODE_BITSIZE (DECL_MODE (decl)));
1930 DECL_USER_ALIGN (decl) = 0;
1932 x = assign_temp (decl, 1, 1, 1);
1933 set_mem_attributes (x, decl, 1);
1934 SET_DECL_RTL (decl, x);
/* Patch the placeholder address register to the real stack address. */
1938 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1939 if (addr != oldaddr)
1940 emit_move_insn (oldaddr, addr);
1944 /* Dynamic-size object: must push space on the stack. */
1946 rtx address, size, x;
1948 /* Record the stack pointer on entry to block, if have
1949 not already done so. */
1950 do_pending_stack_adjust ();
1952 /* Compute the variable's size, in bytes. This will expand any
1953 needed SAVE_EXPRs for the first time. */
1954 size = expand_normal (DECL_SIZE_UNIT (decl));
1957 /* Allocate space on the stack for the variable. Note that
1958 DECL_ALIGN says how the variable is to be aligned and we
1959 cannot use it to conclude anything about the alignment of
1961 address = allocate_dynamic_stack_space (size, NULL_RTX,
1962 TYPE_ALIGN (TREE_TYPE (decl)));
1964 /* Reference the variable indirect through that rtx. */
1965 x = gen_rtx_MEM (DECL_MODE (decl), address);
1966 set_mem_attributes (x, decl, 1);
1967 SET_DECL_RTL (decl, x);
1970 /* Indicate the alignment we actually gave this variable. */
1971 #ifdef STACK_BOUNDARY
1972 DECL_ALIGN (decl) = STACK_BOUNDARY;
1974 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
1976 DECL_USER_ALIGN (decl) = 0;
1980 /* Emit code to save the current value of stack. */
/* Returns the saved stack pointer in RET (declaration and return are on
   elided lines). */
1982 expand_stack_save (void)
1986 do_pending_stack_adjust ();
1987 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1991 /* Emit code to restore the current value of stack. */
1993 expand_stack_restore (tree var)
1995 rtx sa = expand_normal (var);
/* The saved value may have been widened; bring it back to Pmode. */
1997 sa = convert_memory_address (Pmode, sa);
1998 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2001 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2002 DECL_ELTS is the list of elements that belong to DECL's type.
2003 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2006 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2012 /* If any of the elements are addressable, so is the entire union. */
2013 for (t = decl_elts; t; t = TREE_CHAIN (t))
2014 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2016 TREE_ADDRESSABLE (decl) = 1;
/* Expand the union itself first (call elided), then share its RTL. */
2021 x = DECL_RTL (decl);
2023 /* Go through the elements, assigning RTL to each. */
2024 for (t = decl_elts; t; t = TREE_CHAIN (t))
2026 tree decl_elt = TREE_VALUE (t);
2027 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2030 /* If any of the elements are addressable, so is the entire
2032 if (TREE_USED (decl_elt))
2033 TREE_USED (decl) = 1;
2035 /* Propagate the union's alignment to the elements. */
2036 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2037 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2039 /* If the element has BLKmode and the union doesn't, the union is
2040 aligned such that the element doesn't need to have BLKmode, so
2041 change the element's mode to the appropriate one for its size. */
2042 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2043 DECL_MODE (decl_elt) = mode
2044 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
/* Same mode as the union's RTL: the element can use X directly
   (assignment elided); otherwise build a mode-adjusted view. */
2046 if (mode == GET_MODE (x))
2049 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2050 instead create a new MEM rtx with the proper mode. */
2051 decl_rtl = adjust_address_nv (x, mode, 0);
2054 gcc_assert (REG_P (x));
2055 decl_rtl = gen_lowpart_SUBREG (mode, x);
2057 SET_DECL_RTL (decl_elt, decl_rtl);
2061 /* Do the insertion of a case label into case_list. The labels are
2062 fed to us in descending order from the sorted vector of case labels used
2063 in the tree part of the middle end. So the list we construct is
2064 sorted in ascending order. The bounds on the case range, LOW and HIGH,
2065 are converted to case's index type TYPE. */
2067 static struct case_node *
2068 add_case_node (struct case_node *head, tree type, tree low, tree high,
2069 tree label, alloc_pool case_node_pool)
2071 tree min_value, max_value;
2072 struct case_node *r;
2074 gcc_assert (TREE_CODE (low) == INTEGER_CST);
2075 gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);
/* MIN/MAX may be non-constant for some index types; each comparison
   below re-checks INTEGER_CST before relying on them. */
2077 min_value = TYPE_MIN_VALUE (type);
2078 max_value = TYPE_MAX_VALUE (type);
2080 /* If there's no HIGH value, then this is not a case range; it's
2081 just a simple case label. But that's just a degenerate case
2083 If the bounds are equal, turn this into the one-value case. */
2084 if (!high || tree_int_cst_equal (low, high))
2086 /* If the simple case value is unreachable, ignore it. */
2087 if ((TREE_CODE (min_value) == INTEGER_CST
2088 && tree_int_cst_compare (low, min_value) < 0)
2089 || (TREE_CODE (max_value) == INTEGER_CST
2090 && tree_int_cst_compare (low, max_value) > 0))
2092 low = fold_convert (type, low);
2097 /* If the entire case range is unreachable, ignore it. */
2098 if ((TREE_CODE (min_value) == INTEGER_CST
2099 && tree_int_cst_compare (high, min_value) < 0)
2100 || (TREE_CODE (max_value) == INTEGER_CST
2101 && tree_int_cst_compare (low, max_value) > 0))
2104 /* If the lower bound is less than the index type's minimum
2105 value, truncate the range bounds. */
2106 if (TREE_CODE (min_value) == INTEGER_CST
2107 && tree_int_cst_compare (low, min_value) < 0)
2109 low = fold_convert (type, low);
2111 /* If the upper bound is greater than the index type's maximum
2112 value, truncate the range bounds. */
2113 if (TREE_CODE (max_value) == INTEGER_CST
2114 && tree_int_cst_compare (high, max_value) > 0)
2116 high = fold_convert (type, high);
2120 /* Add this label to the chain. Make sure to drop overflow flags. */
2121 r = (struct case_node *) pool_alloc (case_node_pool);
2122 r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
2123 TREE_INT_CST_HIGH (low));
2124 r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
2125 TREE_INT_CST_HIGH (high));
2126 r->code_label = label;
2127 r->parent = r->left = NULL;
2132 /* Maximum number of case bit tests. */
2133 #define MAX_CASE_BIT_TESTS 3
2135 /* By default, enable case bit tests on targets with ashlsi3. */
2136 #ifndef CASE_USE_BIT_TESTS
2137 #define CASE_USE_BIT_TESTS (optab_handler (ashl_optab, word_mode)->insn_code \
2138 != CODE_FOR_nothing)
2142 /* A case_bit_test represents a set of case nodes that may be
2143 selected from using a bit-wise comparison. HI and LO hold
2144 the integer to be tested against, LABEL contains the label
2145 to jump to upon success and BITS counts the number of case
2146 nodes handled by this test, typically the number of bits
/* NOTE(review): the struct's field declarations (hi, lo, label, bits)
   are on elided lines. */
2149 struct case_bit_test
2157 /* Determine whether "1 << x" is relatively cheap in word_mode. */
/* The answer is computed once and cached in the statics below. */
2160 bool lshift_cheap_p (void)
2162 static bool init = false;
2163 static bool cheap = true;
/* 10000 is presumably just a register number well above any hard
   register, standing in for an arbitrary pseudo -- TODO confirm. */
2167 rtx reg = gen_rtx_REG (word_mode, 10000);
2168 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2169 cheap = cost < COSTS_N_INSNS (3);
2176 /* Comparison function for qsort to order bit tests by decreasing
2177 number of case nodes, i.e. the node with the most cases gets
2181 case_bit_test_cmp (const void *p1, const void *p2)
2183 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2184 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
/* Primary key: more bits (cases) sorts first. */
2186 if (d2->bits != d1->bits)
2187 return d2->bits - d1->bits;
2189 /* Stabilize the sort. */
2190 return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2193 /* Expand a switch statement by a short sequence of bit-wise
2194 comparisons. "switch(x)" is effectively converted into
2195 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2198 INDEX_EXPR is the value being switched on, which is of
2199 type INDEX_TYPE. MINVAL is the lowest case value of in
2200 the case nodes, of INDEX_TYPE type, and RANGE is highest
2201 value minus MINVAL, also of type INDEX_TYPE. NODES is
2202 the set of case nodes, and DEFAULT_LABEL is the label to
2203 branch to should none of the cases match.
2205 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2209 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2210 tree range, case_node_ptr nodes, rtx default_label)
2212 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2213 enum machine_mode mode;
2214 rtx expr, index, label;
2215 unsigned int i,j,lo,hi;
2216 struct case_node *n;
2220 for (n = nodes; n; n = n->right)
2222 label = label_rtx (n->code_label);
2223 for (i = 0; i < count; i++)
2224 if (label == test[i].label)
2229 gcc_assert (count < MAX_CASE_BIT_TESTS);
2232 test[i].label = label;
2239 lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2240 n->low, minval), 1);
2241 hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2242 n->high, minval), 1);
2243 for (j = lo; j <= hi; j++)
2244 if (j >= HOST_BITS_PER_WIDE_INT)
2245 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2247 test[i].lo |= (HOST_WIDE_INT) 1 << j;
2250 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2252 index_expr = fold_build2 (MINUS_EXPR, index_type,
2253 fold_convert (index_type, index_expr),
2254 fold_convert (index_type, minval));
2255 index = expand_normal (index_expr);
2256 do_pending_stack_adjust ();
2258 mode = TYPE_MODE (index_type);
2259 expr = expand_normal (range);
2261 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2264 index = convert_to_mode (word_mode, index, 0);
2265 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2266 index, NULL_RTX, 1, OPTAB_WIDEN);
2268 for (i = 0; i < count; i++)
2270 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2271 expr = expand_binop (word_mode, and_optab, index, expr,
2272 NULL_RTX, 1, OPTAB_WIDEN);
2273 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2274 word_mode, 1, test[i].label);
2278 emit_jump (default_label);
/* Fallback definitions so the code below can test HAVE_casesi /
   HAVE_tablejump unconditionally on targets that lack those insn
   patterns (the enclosing #ifndef HAVE_casesi is on an elided line). */
2282 #define HAVE_casesi 0
2285 #ifndef HAVE_tablejump
2286 #define HAVE_tablejump 0
2289 /* Terminate a case (Pascal/Ada) or switch (C) statement
2290 in which ORIG_INDEX is the expression to be tested.
2291 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2292 type as given in the source before any compiler conversions.
2293 Generate the code to test it and jump to the right place. */
2296 expand_case (tree exp)
2298 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2299 rtx default_label = 0;
2300 struct case_node *n;
2301 unsigned int count, uniq;
2307 rtx before_case, end, lab;
2309 tree vec = SWITCH_LABELS (exp);
2310 tree orig_type = TREE_TYPE (exp);
2311 tree index_expr = SWITCH_COND (exp);
2312 tree index_type = TREE_TYPE (index_expr);
2313 int unsignedp = TYPE_UNSIGNED (index_type);
2315 /* The insn after which the case dispatch should finally
2316 be emitted. Zero for a dummy. */
2319 /* A list of case labels; it is first built as a list and it may then
2320 be rearranged into a nearly balanced binary tree. */
2321 struct case_node *case_list = 0;
2323 /* Label to jump to if no case matches. */
2324 tree default_label_decl = NULL_TREE;
2326 alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
2327 sizeof (struct case_node),
2330 /* The switch body is lowered in gimplify.c, we should never have
2331 switches with a non-NULL SWITCH_BODY here. */
2332 gcc_assert (!SWITCH_BODY (exp));
2333 gcc_assert (SWITCH_LABELS (exp));
2335 do_pending_stack_adjust ();
2337 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2338 if (index_type != error_mark_node)
2341 bitmap label_bitmap;
2342 int vl = TREE_VEC_LENGTH (vec);
2344 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2345 expressions being INTEGER_CST. */
2346 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2348 /* The default case, if ever taken, is at the end of TREE_VEC. */
2349 elt = TREE_VEC_ELT (vec, vl - 1);
2350 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
2352 default_label_decl = CASE_LABEL (elt);
2356 for (i = vl - 1; i >= 0; --i)
2359 elt = TREE_VEC_ELT (vec, i);
2361 low = CASE_LOW (elt);
2363 high = CASE_HIGH (elt);
2365 /* Discard empty ranges. */
2366 if (high && tree_int_cst_lt (high, low))
2369 case_list = add_case_node (case_list, index_type, low, high,
2370 CASE_LABEL (elt), case_node_pool);
2374 before_case = start = get_last_insn ();
2375 if (default_label_decl)
2376 default_label = label_rtx (default_label_decl);
2378 /* Get upper and lower bounds of case values. */
2382 label_bitmap = BITMAP_ALLOC (NULL);
2383 for (n = case_list; n; n = n->right)
2385 /* Count the elements and track the largest and smallest
2386 of them (treating them as signed even if they are not). */
2394 if (tree_int_cst_lt (n->low, minval))
2396 if (tree_int_cst_lt (maxval, n->high))
2399 /* A range counts double, since it requires two compares. */
2400 if (! tree_int_cst_equal (n->low, n->high))
2403 /* If we have not seen this label yet, then increase the
2404 number of unique case node targets seen. */
2405 lab = label_rtx (n->code_label);
2406 if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
2408 bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
2413 BITMAP_FREE (label_bitmap);
2415 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2416 destination, such as one with a default case only. However,
2417 it doesn't remove cases that are out of range for the switch
2418 type, so we may still get a zero here. */
2422 emit_jump (default_label);
2423 free_alloc_pool (case_node_pool);
2427 /* Compute span of values. */
2428 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2430 /* Try implementing this switch statement by a short sequence of
2431 bit-wise comparisons. However, we let the binary-tree case
2432 below handle constant index expressions. */
2433 if (CASE_USE_BIT_TESTS
2434 && ! TREE_CONSTANT (index_expr)
2435 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
2436 && compare_tree_int (range, 0) > 0
2437 && lshift_cheap_p ()
2438 && ((uniq == 1 && count >= 3)
2439 || (uniq == 2 && count >= 5)
2440 || (uniq == 3 && count >= 6)))
2442 /* Optimize the case where all the case values fit in a
2443 word without having to subtract MINVAL. In this case,
2444 we can optimize away the subtraction. */
2445 if (compare_tree_int (minval, 0) > 0
2446 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
2448 minval = build_int_cst (index_type, 0);
2451 emit_case_bit_tests (index_type, index_expr, minval, range,
2452 case_list, default_label);
2455 /* If range of values is much bigger than number of values,
2456 make a sequence of conditional branches instead of a dispatch.
2457 If the switch-index is a constant, do it this way
2458 because we can optimize it. */
2460 else if (count < case_values_threshold ()
2461 || compare_tree_int (range,
2462 (optimize_size ? 3 : 10) * count) > 0
2463 /* RANGE may be signed, and really large ranges will show up
2464 as negative numbers. */
2465 || compare_tree_int (range, 0) < 0
2466 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
2469 || !flag_jump_tables
2470 || TREE_CONSTANT (index_expr)
2471 /* If neither casesi or tablejump is available, we can
2472 only go this way. */
2473 || (!HAVE_casesi && !HAVE_tablejump))
2475 index = expand_normal (index_expr);
2477 /* If the index is a short or char that we do not have
2478 an insn to handle comparisons directly, convert it to
2479 a full integer now, rather than letting each comparison
2480 generate the conversion. */
2482 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
2483 && ! have_insn_for (COMPARE, GET_MODE (index)))
2485 enum machine_mode wider_mode;
2486 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
2487 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
2488 if (have_insn_for (COMPARE, wider_mode))
2490 index = convert_to_mode (wider_mode, index, unsignedp);
2495 do_pending_stack_adjust ();
2498 index = copy_to_reg (index);
2500 /* We generate a binary decision tree to select the
2501 appropriate target code. This is done as follows:
2503 The list of cases is rearranged into a binary tree,
2504 nearly optimal assuming equal probability for each case.
2506 The tree is transformed into RTL, eliminating
2507 redundant test conditions at the same time.
2509 If program flow could reach the end of the
2510 decision tree an unconditional jump to the
2511 default code is emitted. */
2514 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
2515 && estimate_case_costs (case_list));
2516 balance_case_nodes (&case_list, NULL);
2517 emit_case_nodes (index, case_list, default_label, index_type);
2519 emit_jump (default_label);
2523 rtx fallback_label = label_rtx (case_list->code_label);
2524 table_label = gen_label_rtx ();
2525 if (! try_casesi (index_type, index_expr, minval, range,
2526 table_label, default_label, fallback_label))
2530 /* Index jumptables from zero for suitable values of
2531 minval to avoid a subtraction. */
2533 && compare_tree_int (minval, 0) > 0
2534 && compare_tree_int (minval, 3) < 0)
2536 minval = build_int_cst (index_type, 0);
2540 ok = try_tablejump (index_type, index_expr, minval, range,
2541 table_label, default_label);
2545 /* Get table of labels to jump to, in order of case index. */
2547 ncases = tree_low_cst (range, 0) + 1;
2548 labelvec = XALLOCAVEC (rtx, ncases);
2549 memset (labelvec, 0, ncases * sizeof (rtx));
2551 for (n = case_list; n; n = n->right)
2553 /* Compute the low and high bounds relative to the minimum
2554 value since that should fit in a HOST_WIDE_INT while the
2555 actual values may not. */
2557 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2558 n->low, minval), 1);
2559 HOST_WIDE_INT i_high
2560 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2561 n->high, minval), 1);
2564 for (i = i_low; i <= i_high; i ++)
2566 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2569 /* Fill in the gaps with the default. We may have gaps at
2570 the beginning if we tried to avoid the minval subtraction,
2571 so substitute some label even if the default label was
2572 deemed unreachable. */
2574 default_label = fallback_label;
2575 for (i = 0; i < ncases; i++)
2576 if (labelvec[i] == 0)
2577 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2579 /* Output the table. */
2580 emit_label (table_label);
2582 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2583 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2584 gen_rtx_LABEL_REF (Pmode, table_label),
2585 gen_rtvec_v (ncases, labelvec),
2586 const0_rtx, const0_rtx));
2588 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2589 gen_rtvec_v (ncases, labelvec)));
2591 /* Record no drop-through after the table. */
2595 before_case = NEXT_INSN (before_case);
2596 end = get_last_insn ();
2597 reorder_insns (before_case, end, start);
2601 free_alloc_pool (case_node_pool);
2604 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. */
/* NOTE(review): this excerpt elides the return-type line and the final
   UNSIGNEDP parameter of the signature; UNSIGNEDP is forwarded to
   do_compare_rtx_and_jump to select signed vs. unsigned comparison
   semantics for the EQ test — confirm against the full source.  */
2607 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
2610 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
2611 NULL_RTX, NULL_RTX, label);
2614 /* Not all case values are encountered equally. This function
2615 uses a heuristic to weight case labels, in cases where that
2616 looks like a reasonable thing to do.
2618 Right now, all we try to guess is text, and we establish the
2621 chars above space: 16
2630 If we find any cases in the switch that are not either -1 or in the range
2631 of valid ASCII characters, or are control characters other than those
2632 commonly used with "\", don't treat this switch scanning text.
2634 Return 1 if these nodes are suitable for cost estimation, otherwise
2638 estimate_case_costs (case_node_ptr node)
/* MIN_ASCII/MAX_ASCII bound the "looks like text" window: every case
   value must fall in [-1, 127] for the cost table to apply.  */
2640 tree min_ascii = integer_minus_one_node;
2641 tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2645 /* If we haven't already made the cost table, make it now. Note that the
2646 lower bound of the table is -1, not zero. */
2648 if (! cost_table_initialized)
2650 cost_table_initialized = 1;
/* Default weighting: a weight of 16 is assigned under an elided
   condition (presumably for common printable characters — confirm
   against the full source); control characters are marked unusable
   (-1) unless overridden by the common escapes assigned below.  */
2652 for (i = 0; i < 128; i++)
2655 COST_TABLE (i) = 16;
2656 else if (ISPUNCT (i))
2658 else if (ISCNTRL (i))
2659 COST_TABLE (i) = -1;
/* Whitespace and common backslash-escape characters get smaller,
   fixed weights, roughly in decreasing order of expected frequency.  */
2662 COST_TABLE (' ') = 8;
2663 COST_TABLE ('\t') = 4;
2664 COST_TABLE ('\0') = 4;
2665 COST_TABLE ('\n') = 2;
2666 COST_TABLE ('\f') = 1;
2667 COST_TABLE ('\v') = 1;
2668 COST_TABLE ('\b') = 1;
2671 /* See if all the case expressions look like text. It is text if the
2672 constant is >= -1 and the highest constant is <= 127. Do all comparisons
2673 as signed arithmetic since we don't want to ever access cost_table with a
2674 value less than -1. Also check that none of the constants in a range
2675 are strange control characters. */
2677 for (n = node; n; n = n->right)
/* Any case value outside [-1, 127] means this is not text; the
   elided branch body presumably bails out with 0 — confirm.  */
2679 if (tree_int_cst_lt (n->low, min_ascii)
2680 || tree_int_cst_lt (max_ascii, n->high))
/* Walk every value of the range; a disallowed control character
   (negative table entry) also disqualifies cost estimation.  */
2683 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2684 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2685 if (COST_TABLE (i) < 0)
2689 /* All interesting values are within the range of interesting
2690 ASCII characters. */
2694 /* Take an ordered list of case nodes
2695 and transform them into a near optimal binary tree,
2696 on the assumption that any target code selection value is as
2697 likely as any other.
2699 The transformation is performed by splitting the ordered
2700 list into two equal sections plus a pivot. The parts are
2701 then attached to the pivot as left and right branches. Each
2702 branch is then transformed recursively. */
2705 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2718 /* Count the number of entries on branch. Also count the ranges. */
2722 if (!tree_int_cst_equal (np->low, np->high))
/* A range accumulates the cost of both endpoints; a single value
   only one.  (Cost accumulation is guarded by an elided condition,
   presumably the use_cost_table flag — confirm against full source.)  */
2726 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
2730 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
2738 /* Split this list if it is long enough for that to help. */
2743 /* Find the place in the list that bisects the list's total cost,
2744 Here I gets half the total cost. */
2749 /* Skip nodes while their cost does not reach that amount. */
2750 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2751 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
2752 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
2755 npp = &(*npp)->right;
2760 /* Leave this branch lopsided, but optimize left-hand
2761 side and fill in `parent' fields for right-hand side. */
2763 np->parent = parent;
2764 balance_case_nodes (&np->left, np);
2765 for (; np->right; np = np->right)
2766 np->right->parent = np;
2770 /* If there are just three nodes, split at the middle one. */
2772 npp = &(*npp)->right;
2775 /* Find the place in the list that bisects the list's total cost,
2776 where ranges count as 2.
2777 Here I gets half the total cost. */
2778 i = (i + ranges + 1) / 2;
2781 /* Skip nodes while their cost does not reach that amount. */
2782 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2787 npp = &(*npp)->right;
/* *NPP now points at the pivot node; the elided lines detach it and
   the recursive calls below balance the two halves.  */
2792 np->parent = parent;
2795 /* Optimize each of the two split parts. */
2796 balance_case_nodes (&np->left, np);
2797 balance_case_nodes (&np->right, np);
2801 /* Else leave this branch as one level,
2802 but fill in `parent' fields. */
2804 np->parent = parent;
2805 for (; np->right; np = np->right)
2806 np->right->parent = np;
2811 /* Search the parent sections of the case node tree
2812 to see if a test for the lower bound of NODE would be redundant.
2813 INDEX_TYPE is the type of the index expression.
2815 The instructions to generate the case decision tree are
2816 output in the same order as nodes are processed so it is
2817 known that if a parent node checks the range of the current
2818 node minus one that the current node is bounded at its lower
2819 span. Thus the test would be redundant. */
2822 node_has_low_bound (case_node_ptr node, tree index_type)
2825 case_node_ptr pnode;
2827 /* If the lower bound of this node is the lowest value in the index type,
2828 we need not test it. */
2830 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2833 /* If this node has a left branch, the value at the left must be less
2834 than that at this node, so it cannot be bounded at the bottom and
2835 we need not bother testing any further. */
/* Compute LOW - 1 in the type of the bound for the ancestor search.  */
2840 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2842 build_int_cst (TREE_TYPE (node->low), 1));
2844 /* If the subtraction above overflowed, we can't verify anything.
2845 Otherwise, look for a parent that tests our value - 1. */
/* Overflow check: if LOW - 1 is not strictly less than LOW, the
   subtraction wrapped around, so no conclusion can be drawn.  */
2847 if (! tree_int_cst_lt (low_minus_one, node->low))
/* An ancestor whose high bound equals LOW - 1 has already excluded
   every value below this node's low bound.  */
2850 for (pnode = node->parent; pnode; pnode = pnode->parent)
2851 if (tree_int_cst_equal (low_minus_one, pnode->high))
2857 /* Search the parent sections of the case node tree
2858 to see if a test for the upper bound of NODE would be redundant.
2859 INDEX_TYPE is the type of the index expression.
2861 The instructions to generate the case decision tree are
2862 output in the same order as nodes are processed so it is
2863 known that if a parent node checks the range of the current
2864 node plus one that the current node is bounded at its upper
2865 span. Thus the test would be redundant. */
2868 node_has_high_bound (case_node_ptr node, tree index_type)
2871 case_node_ptr pnode;
2873 /* If there is no upper bound, obviously no test is needed. */
2875 if (TYPE_MAX_VALUE (index_type) == NULL)
2878 /* If the upper bound of this node is the highest value in the type
2879 of the index expression, we need not test against it. */
2881 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2884 /* If this node has a right branch, the value at the right must be greater
2885 than that at this node, so it cannot be bounded at the top and
2886 we need not bother testing any further. */
/* Compute HIGH + 1 in the type of the bound for the ancestor search.  */
2891 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2893 build_int_cst (TREE_TYPE (node->high), 1));
2895 /* If the addition above overflowed, we can't verify anything.
2896 Otherwise, look for a parent that tests our value + 1. */
/* Overflow check: if HIGH is not strictly less than HIGH + 1, the
   addition wrapped around, so no conclusion can be drawn.  */
2898 if (! tree_int_cst_lt (node->high, high_plus_one))
/* An ancestor whose low bound equals HIGH + 1 has already excluded
   every value above this node's high bound.  */
2901 for (pnode = node->parent; pnode; pnode = pnode->parent)
2902 if (tree_int_cst_equal (high_plus_one, pnode->low))
2908 /* Search the parent sections of the
2909 case node tree to see if both tests for the upper and lower
2910 bounds of NODE would be redundant. */
2913 node_is_bounded (case_node_ptr node, tree index_type)
/* Fully bounded iff both one-sided redundancy checks succeed.  */
2915 return (node_has_low_bound (node, index_type)
2916 && node_has_high_bound (node, index_type));
2919 /* Emit step-by-step code to select a case for the value of INDEX.
2920 The thus generated decision tree follows the form of the
2921 case-node binary tree NODE, whose nodes represent test conditions.
2922 INDEX_TYPE is the type of the index of the switch.
2924 Care is taken to prune redundant tests from the decision tree
2925 by detecting any boundary conditions already checked by
2926 emitted rtx. (See node_has_high_bound, node_has_low_bound
2927 and node_is_bounded, above.)
2929 Where the test conditions can be shown to be redundant we emit
2930 an unconditional jump to the target code. As a further
2931 optimization, the subordinates of a tree node are examined to
2932 check for bounded nodes. In this case conditional and/or
2933 unconditional jumps as a result of the boundary check for the
2934 current node are arranged to target the subordinates associated
2935 code for out of bound conditions on the current node.
2937 We can assume that when control reaches the code generated here,
2938 the index value has already been compared with the parents
2939 of this node, and determined to be on the same side of each parent
2940 as this node is. Thus, if this node tests for the value 51,
2941 and a parent tested for 52, we don't need to consider
2942 the possibility of a value greater than 51. If another parent
2943 tests for the value 50, then this node need not test anything. */
2946 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2949 /* If INDEX has an unsigned type, we must make unsigned branches. */
2950 int unsignedp = TYPE_UNSIGNED (index_type);
2951 enum machine_mode mode = GET_MODE (index);
2952 enum machine_mode imode = TYPE_MODE (index_type);
2954 /* Handle indices detected as constant during RTL expansion. */
2955 if (mode == VOIDmode)
2958 /* See if our parents have already tested everything for us.
2959 If they have, emit an unconditional jump for this node. */
2960 if (node_is_bounded (node, index_type))
2961 emit_jump (label_rtx (node->code_label));
2963 else if (tree_int_cst_equal (node->low, node->high))
2965 /* Node is single valued. First see if the index expression matches
2966 this node and then check our children, if any. */
2968 do_jump_if_equal (mode, index,
2969 convert_modes (mode, imode,
2970 expand_normal (node->low),
2972 label_rtx (node->code_label), unsignedp);
2974 if (node->right != 0 && node->left != 0)
2976 /* This node has children on both sides.
2977 Dispatch to one side or the other
2978 by comparing the index value with this node's value.
2979 If one subtree is bounded, check that one first,
2980 so we can avoid real branches in the tree. */
2982 if (node_is_bounded (node->right, index_type))
2984 emit_cmp_and_jump_insns (index,
2987 expand_normal (node->high),
2989 GT, NULL_RTX, mode, unsignedp,
2990 label_rtx (node->right->code_label));
2991 emit_case_nodes (index, node->left, default_label, index_type);
2994 else if (node_is_bounded (node->left, index_type))
2996 emit_cmp_and_jump_insns (index,
2999 expand_normal (node->high),
3001 LT, NULL_RTX, mode, unsignedp,
3002 label_rtx (node->left->code_label));
3003 emit_case_nodes (index, node->right, default_label, index_type);
3006 /* If both children are single-valued cases with no
3007 children, finish up all the work. This way, we can save
3008 one ordered comparison. */
3009 else if (tree_int_cst_equal (node->right->low, node->right->high)
3010 && node->right->left == 0
3011 && node->right->right == 0
3012 && tree_int_cst_equal (node->left->low, node->left->high)
3013 && node->left->left == 0
3014 && node->left->right == 0)
3016 /* Neither node is bounded. First distinguish the two sides;
3017 then emit the code for one side at a time. */
3019 /* See if the value matches what the right hand side
3021 do_jump_if_equal (mode, index,
3022 convert_modes (mode, imode,
3023 expand_normal (node->right->low),
3025 label_rtx (node->right->code_label),
3028 /* See if the value matches what the left hand side
3030 do_jump_if_equal (mode, index,
3031 convert_modes (mode, imode,
3032 expand_normal (node->left->low),
3034 label_rtx (node->left->code_label),
3040 /* Neither node is bounded. First distinguish the two sides;
3041 then emit the code for one side at a time. */
3043 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3045 /* See if the value is on the right. */
3046 emit_cmp_and_jump_insns (index,
3049 expand_normal (node->high),
3051 GT, NULL_RTX, mode, unsignedp,
3052 label_rtx (test_label));
3054 /* Value must be on the left.
3055 Handle the left-hand subtree. */
3056 emit_case_nodes (index, node->left, default_label, index_type);
3057 /* If left-hand subtree does nothing,
3060 emit_jump (default_label);
3062 /* Code branches here for the right-hand subtree. */
3063 expand_label (test_label);
3064 emit_case_nodes (index, node->right, default_label, index_type);
3068 else if (node->right != 0 && node->left == 0)
3070 /* Here we have a right child but no left so we issue a conditional
3071 branch to default and process the right child.
3073 Omit the conditional branch to default if the right child
3074 does not have any children and is single valued; it would
3075 cost too much space to save so little time. */
3077 if (node->right->right || node->right->left
3078 || !tree_int_cst_equal (node->right->low, node->right->high))
3080 if (!node_has_low_bound (node, index_type))
3082 emit_cmp_and_jump_insns (index,
3085 expand_normal (node->high),
3087 LT, NULL_RTX, mode, unsignedp,
3091 emit_case_nodes (index, node->right, default_label, index_type);
3094 /* We cannot process node->right normally
3095 since we haven't ruled out the numbers less than
3096 this node's value. So handle node->right explicitly. */
3097 do_jump_if_equal (mode, index,
3100 expand_normal (node->right->low),
3102 label_rtx (node->right->code_label), unsignedp);
3105 else if (node->right == 0 && node->left != 0)
3107 /* Just one subtree, on the left. */
3108 if (node->left->left || node->left->right
3109 || !tree_int_cst_equal (node->left->low, node->left->high))
3111 if (!node_has_high_bound (node, index_type))
3113 emit_cmp_and_jump_insns (index,
3116 expand_normal (node->high),
3118 GT, NULL_RTX, mode, unsignedp,
3122 emit_case_nodes (index, node->left, default_label, index_type);
3125 /* We cannot process node->left normally
3126 since we haven't ruled out the numbers less than
3127 this node's value. So handle node->left explicitly. */
3128 do_jump_if_equal (mode, index,
3131 expand_normal (node->left->low),
3133 label_rtx (node->left->code_label), unsignedp);
3138 /* Node is a range. These cases are very similar to those for a single
3139 value, except that we do not start by testing whether this node
3140 is the one to branch to. */
3142 if (node->right != 0 && node->left != 0)
3144 /* Node has subtrees on both sides.
3145 If the right-hand subtree is bounded,
3146 test for it first, since we can go straight there.
3147 Otherwise, we need to make a branch in the control structure,
3148 then handle the two subtrees. */
3149 tree test_label = 0;
3151 if (node_is_bounded (node->right, index_type))
3152 /* Right hand node is fully bounded so we can eliminate any
3153 testing and branch directly to the target code. */
3154 emit_cmp_and_jump_insns (index,
3157 expand_normal (node->high),
3159 GT, NULL_RTX, mode, unsignedp,
3160 label_rtx (node->right->code_label));
3163 /* Right hand node requires testing.
3164 Branch to a label where we will handle it later. */
3166 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3167 emit_cmp_and_jump_insns (index,
3170 expand_normal (node->high),
3172 GT, NULL_RTX, mode, unsignedp,
3173 label_rtx (test_label));
3176 /* Value belongs to this node or to the left-hand subtree. */
3178 emit_cmp_and_jump_insns (index,
3181 expand_normal (node->low),
3183 GE, NULL_RTX, mode, unsignedp,
3184 label_rtx (node->code_label));
3186 /* Handle the left-hand subtree. */
3187 emit_case_nodes (index, node->left, default_label, index_type);
3189 /* If right node had to be handled later, do that now. */
3193 /* If the left-hand subtree fell through,
3194 don't let it fall into the right-hand subtree. */
3196 emit_jump (default_label);
3198 expand_label (test_label);
3199 emit_case_nodes (index, node->right, default_label, index_type);
3203 else if (node->right != 0 && node->left == 0)
3205 /* Deal with values to the left of this node,
3206 if they are possible. */
3207 if (!node_has_low_bound (node, index_type))
3209 emit_cmp_and_jump_insns (index,
3212 expand_normal (node->low),
3214 LT, NULL_RTX, mode, unsignedp,
3218 /* Value belongs to this node or to the right-hand subtree. */
3220 emit_cmp_and_jump_insns (index,
3223 expand_normal (node->high),
3225 LE, NULL_RTX, mode, unsignedp,
3226 label_rtx (node->code_label));
3228 emit_case_nodes (index, node->right, default_label, index_type);
3231 else if (node->right == 0 && node->left != 0)
3233 /* Deal with values to the right of this node,
3234 if they are possible. */
3235 if (!node_has_high_bound (node, index_type))
3237 emit_cmp_and_jump_insns (index,
3240 expand_normal (node->high),
3242 GT, NULL_RTX, mode, unsignedp,
3246 /* Value belongs to this node or to the left-hand subtree. */
3248 emit_cmp_and_jump_insns (index,
3251 expand_normal (node->low),
3253 GE, NULL_RTX, mode, unsignedp,
3254 label_rtx (node->code_label));
3256 emit_case_nodes (index, node->left, default_label, index_type);
3261 /* Node has no children so we check low and high bounds to remove
3262 redundant tests. Only one of the bounds can exist,
3263 since otherwise this node is bounded--a case tested already. */
3264 int high_bound = node_has_high_bound (node, index_type);
3265 int low_bound = node_has_low_bound (node, index_type);
3267 if (!high_bound && low_bound)
3269 emit_cmp_and_jump_insns (index,
3272 expand_normal (node->high),
3274 GT, NULL_RTX, mode, unsignedp,
3278 else if (!low_bound && high_bound)
3280 emit_cmp_and_jump_insns (index,
3283 expand_normal (node->low),
3285 LT, NULL_RTX, mode, unsignedp,
3288 else if (!low_bound && !high_bound)
3290 /* Widen LOW and HIGH to the same width as INDEX. */
3291 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3292 tree low = build1 (CONVERT_EXPR, type, node->low);
3293 tree high = build1 (CONVERT_EXPR, type, node->high);
3294 rtx low_rtx, new_index, new_bound;
3296 /* Instead of doing two branches, emit one unsigned branch for
3297 (index-low) > (high-low). */
3298 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3299 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3300 NULL_RTX, unsignedp,
3302 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3304 NULL_RTX, mode, EXPAND_NORMAL);
3306 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3307 mode, 1, default_label);
3310 emit_jump (label_rtx (node->code_label));