1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option) any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
32 #include "hard-reg-set.h"
33 #include "insn-config.h"
36 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
37 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
40 static rtx break_out_memory_refs PARAMS ((rtx));
41 static void emit_stack_probe PARAMS ((rtx));
44 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
47 trunc_int_for_mode (c, mode)
49 enum machine_mode mode;
51 int width = GET_MODE_BITSIZE (mode);
53 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
55 return c & 1 ? STORE_FLAG_VALUE : 0;
57 /* Sign-extend for the requested mode. */
59 if (width < HOST_BITS_PER_WIDE_INT)
61 HOST_WIDE_INT sign = 1;
71 /* Return an rtx for the sum of X and the integer C.
73 This function should be used via the `plus_constant' macro. */
76 plus_constant_wide (x, c)
78 register HOST_WIDE_INT c;
80 register RTX_CODE code;
82 register enum machine_mode mode;
98 return GEN_INT (INTVAL (x) + c);
102 unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
103 HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
104 unsigned HOST_WIDE_INT l2 = c;
105 HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
106 unsigned HOST_WIDE_INT lv;
109 add_double (l1, h1, l2, h2, &lv, &hv);
111 return immed_double_const (lv, hv, VOIDmode);
115 /* If this is a reference to the constant pool, try replacing it with
116 a reference to a new constant. If the resulting address isn't
117 valid, don't return it because we have no way to validize it. */
118 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
119 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
122 = force_const_mem (GET_MODE (x),
123 plus_constant (get_pool_constant (XEXP (x, 0)),
125 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
131 /* If adding to something entirely constant, set a flag
132 so that we can add a CONST around the result. */
143 /* The interesting case is adding the integer to a sum.
144 Look for constant term in the sum and combine
145 with C. For an integer constant term, we make a combined
146 integer. For a constant term that is not an explicit integer,
147 we cannot really combine, but group them together anyway.
149 Restart or use a recursive call in case the remaining operand is
150 something that we handle specially, such as a SYMBOL_REF.
152 We may not immediately return from the recursive call here, lest
153 all_constant gets lost. */
155 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
157 c += INTVAL (XEXP (x, 1));
159 if (GET_MODE (x) != VOIDmode)
160 c = trunc_int_for_mode (c, GET_MODE (x));
165 else if (CONSTANT_P (XEXP (x, 1)))
167 x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
170 else if (find_constant_term_loc (&y))
172 /* We need to be careful since X may be shared and we can't
173 modify it in place. */
174 rtx copy = copy_rtx (x);
175 rtx *const_loc = find_constant_term_loc (©);
177 *const_loc = plus_constant (*const_loc, c);
188 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
190 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
192 else if (all_constant)
193 return gen_rtx_CONST (mode, x);
198 /* If X is a sum, return a new sum like X but lacking any constant terms.
199 Add all the removed constant terms into *CONSTPTR.
200 X itself is not altered. The result != X if and only if
201 it is not isomorphic to X. */
204 eliminate_constant_term (x, constptr)
211 if (GET_CODE (x) != PLUS)
214 /* First handle constants appearing at this level explicitly. */
215 if (GET_CODE (XEXP (x, 1)) == CONST_INT
216 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
218 && GET_CODE (tem) == CONST_INT)
221 return eliminate_constant_term (XEXP (x, 0), constptr);
225 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
226 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
227 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
228 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
230 && GET_CODE (tem) == CONST_INT)
233 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
239 /* Returns the insn that next references REG after INSN, or 0
240 if REG is clobbered before next referenced or we cannot find
241 an insn that references REG in a straight-line piece of code. */
244 find_next_ref (reg, insn)
250 for (insn = NEXT_INSN (insn); insn; insn = next)
252 next = NEXT_INSN (insn);
253 if (GET_CODE (insn) == NOTE)
255 if (GET_CODE (insn) == CODE_LABEL
256 || GET_CODE (insn) == BARRIER)
258 if (GET_CODE (insn) == INSN
259 || GET_CODE (insn) == JUMP_INSN
260 || GET_CODE (insn) == CALL_INSN)
262 if (reg_set_p (reg, insn))
264 if (reg_mentioned_p (reg, PATTERN (insn)))
266 if (GET_CODE (insn) == JUMP_INSN)
268 if (any_uncondjump_p (insn))
269 next = JUMP_LABEL (insn);
273 if (GET_CODE (insn) == CALL_INSN
274 && REGNO (reg) < FIRST_PSEUDO_REGISTER
275 && call_used_regs[REGNO (reg)])
284 /* Return an rtx for the size in bytes of the value of EXP. */
292 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
293 && DECL_SIZE_UNIT (exp) != 0)
294 size = DECL_SIZE_UNIT (exp);
296 size = size_in_bytes (TREE_TYPE (exp));
298 if (TREE_CODE (size) != INTEGER_CST
299 && contains_placeholder_p (size))
300 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
302 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
303 EXPAND_MEMORY_USE_BAD);
306 /* Return a copy of X in which all memory references
307 and all constants that involve symbol refs
308 have been replaced with new temporary registers.
309 Also emit code to load the memory locations and constants
310 into those registers.
312 If X contains no such constants or memory references,
313 X itself (not a copy) is returned.
315 If a constant is found in the address that is not a legitimate constant
316 in an insn, it is left alone in the hope that it might be valid in the
319 X may contain no arithmetic except addition, subtraction and multiplication.
320 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
323 break_out_memory_refs (x)
326 if (GET_CODE (x) == MEM
327 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
328 && GET_MODE (x) != VOIDmode))
329 x = force_reg (GET_MODE (x), x);
330 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
331 || GET_CODE (x) == MULT)
333 register rtx op0 = break_out_memory_refs (XEXP (x, 0));
334 register rtx op1 = break_out_memory_refs (XEXP (x, 1));
336 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
337 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
343 #ifdef POINTERS_EXTEND_UNSIGNED
345 /* Given X, a memory address in ptr_mode, convert it to an address
346 in Pmode, or vice versa (TO_MODE says which way). We take advantage of
347 the fact that pointers are not allowed to overflow by commuting arithmetic
348 operations over conversions so that address arithmetic insns can be
352 convert_memory_address (to_mode, x)
353 enum machine_mode to_mode;
356 enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
359 /* Here we handle some special cases. If none of them apply, fall through
360 to the default case. */
361 switch (GET_CODE (x))
368 if (POINTERS_EXTEND_UNSIGNED >= 0
369 && GET_MODE (SUBREG_REG (x)) == to_mode)
370 return SUBREG_REG (x);
374 if (POINTERS_EXTEND_UNSIGNED >= 0)
376 temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
377 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
383 if (POINTERS_EXTEND_UNSIGNED >= 0)
385 temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
386 SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
387 CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
388 STRING_POOL_ADDRESS_P (temp) = STRING_POOL_ADDRESS_P (x);
394 if (POINTERS_EXTEND_UNSIGNED >= 0)
395 return gen_rtx_CONST (to_mode,
396 convert_memory_address (to_mode, XEXP (x, 0)));
401 /* For addition the second operand is a small constant, we can safely
402 permute the conversion and addition operation. We can always safely
403 permute them if we are making the address narrower. In addition,
404 always permute the operations if this is a constant. */
405 if (POINTERS_EXTEND_UNSIGNED >= 0
406 && (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
407 || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
408 && (INTVAL (XEXP (x, 1)) + 20000 < 40000
409 || CONSTANT_P (XEXP (x, 0))))))
410 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
411 convert_memory_address (to_mode, XEXP (x, 0)),
412 convert_memory_address (to_mode, XEXP (x, 1)));
419 return convert_modes (to_mode, from_mode,
420 x, POINTERS_EXTEND_UNSIGNED);
424 /* Given a memory address or facsimile X, construct a new address,
425 currently equivalent, that is stable: future stores won't change it.
427 X must be composed of constants, register and memory references
428 combined with addition, subtraction and multiplication:
429 in other words, just what you can get from expand_expr if sum_ok is 1.
431 Works by making copies of all regs and memory locations used
432 by X and combining them the same way X does.
433 You could also stabilize the reference to this address
434 by copying the address to a register with copy_to_reg;
435 but then you wouldn't get indexed addressing in the reference. */
441 if (GET_CODE (x) == REG)
443 if (REGNO (x) != FRAME_POINTER_REGNUM
444 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
445 && REGNO (x) != HARD_FRAME_POINTER_REGNUM
450 else if (GET_CODE (x) == MEM)
452 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
453 || GET_CODE (x) == MULT)
455 register rtx op0 = copy_all_regs (XEXP (x, 0));
456 register rtx op1 = copy_all_regs (XEXP (x, 1));
457 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
458 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
463 /* Return something equivalent to X but valid as a memory address
464 for something of mode MODE. When X is not itself valid, this
465 works by copying X or subexpressions of it into registers. */
468 memory_address (mode, x)
469 enum machine_mode mode;
472 register rtx oldx = x;
474 if (GET_CODE (x) == ADDRESSOF)
477 #ifdef POINTERS_EXTEND_UNSIGNED
478 if (GET_MODE (x) == ptr_mode)
479 x = convert_memory_address (Pmode, x);
482 /* By passing constant addresses thru registers
483 we get a chance to cse them. */
484 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
485 x = force_reg (Pmode, x);
487 /* Accept a QUEUED that refers to a REG
488 even though that isn't a valid address.
489 On attempting to put this in an insn we will call protect_from_queue
490 which will turn it into a REG, which is valid. */
491 else if (GET_CODE (x) == QUEUED
492 && GET_CODE (QUEUED_VAR (x)) == REG)
495 /* We get better cse by rejecting indirect addressing at this stage.
496 Let the combiner create indirect addresses where appropriate.
497 For now, generate the code so that the subexpressions useful to share
498 are visible. But not if cse won't be done! */
501 if (! cse_not_expected && GET_CODE (x) != REG)
502 x = break_out_memory_refs (x);
504 /* At this point, any valid address is accepted. */
505 GO_IF_LEGITIMATE_ADDRESS (mode, x, win);
507 /* If it was valid before but breaking out memory refs invalidated it,
508 use it the old way. */
509 if (memory_address_p (mode, oldx))
512 /* Perform machine-dependent transformations on X
513 in certain cases. This is not necessary since the code
514 below can handle all possible cases, but machine-dependent
515 transformations can make better code. */
516 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
518 /* PLUS and MULT can appear in special ways
519 as the result of attempts to make an address usable for indexing.
520 Usually they are dealt with by calling force_operand, below.
521 But a sum containing constant terms is special
522 if removing them makes the sum a valid address:
523 then we generate that address in a register
524 and index off of it. We do this because it often makes
525 shorter code, and because the addresses thus generated
526 in registers often become common subexpressions. */
527 if (GET_CODE (x) == PLUS)
529 rtx constant_term = const0_rtx;
530 rtx y = eliminate_constant_term (x, &constant_term);
531 if (constant_term == const0_rtx
532 || ! memory_address_p (mode, y))
533 x = force_operand (x, NULL_RTX);
536 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
537 if (! memory_address_p (mode, y))
538 x = force_operand (x, NULL_RTX);
544 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
545 x = force_operand (x, NULL_RTX);
547 /* If we have a register that's an invalid address,
548 it must be a hard reg of the wrong class. Copy it to a pseudo. */
549 else if (GET_CODE (x) == REG)
552 /* Last resort: copy the value to a register, since
553 the register is a valid address. */
555 x = force_reg (Pmode, x);
562 if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
563 /* Don't copy an addr via a reg if it is one of our stack slots. */
564 && ! (GET_CODE (x) == PLUS
565 && (XEXP (x, 0) == virtual_stack_vars_rtx
566 || XEXP (x, 0) == virtual_incoming_args_rtx)))
568 if (general_operand (x, Pmode))
569 x = force_reg (Pmode, x);
571 x = force_operand (x, NULL_RTX);
577 /* If we didn't change the address, we are done. Otherwise, mark
578 a reg as a pointer if we have REG or REG + CONST_INT. */
581 else if (GET_CODE (x) == REG)
582 mark_reg_pointer (x, BITS_PER_UNIT);
583 else if (GET_CODE (x) == PLUS
584 && GET_CODE (XEXP (x, 0)) == REG
585 && GET_CODE (XEXP (x, 1)) == CONST_INT)
586 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
588 /* OLDX may have been the address on a temporary. Update the address
589 to indicate that X is now used. */
590 update_temp_slot_address (oldx, x);
595 /* Like `memory_address' but pretend `flag_force_addr' is 0. */
598 memory_address_noforce (mode, x)
599 enum machine_mode mode;
602 int ambient_force_addr = flag_force_addr;
606 val = memory_address (mode, x);
607 flag_force_addr = ambient_force_addr;
611 /* Convert a mem ref into one with a valid memory address.
612 Pass through anything else unchanged. */
618 if (GET_CODE (ref) != MEM)
620 if (! (flag_force_addr && CONSTANT_ADDRESS_P (XEXP (ref, 0)))
621 && memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
624 /* Don't alter REF itself, since that is probably a stack slot. */
625 return replace_equiv_address (ref, XEXP (ref, 0));
628 /* Given REF, either a MEM or a REG, and T, either the type of X or
629 the expression corresponding to REF, set RTX_UNCHANGING_P if
633 maybe_set_unchanging (ref, t)
637 /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
638 initialization is only executed once, or whose initializer always
639 has the same value. Currently we simplify this to PARM_DECLs in the
640 first case, and decls with TREE_CONSTANT initializers in the second. */
641 if ((TREE_READONLY (t) && DECL_P (t)
642 && (TREE_CODE (t) == PARM_DECL
643 || DECL_INITIAL (t) == NULL_TREE
644 || TREE_CONSTANT (DECL_INITIAL (t))))
645 || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
646 RTX_UNCHANGING_P (ref) = 1;
649 /* Given REF, a MEM, and T, either the type of X or the expression
650 corresponding to REF, set the memory attributes. OBJECTP is nonzero
651 if we are making a new object of this type. */
654 set_mem_attributes (ref, t, objectp)
661 /* It can happen that type_for_mode was given a mode for which there
662 is no language-level type. In which case it returns NULL, which
667 type = TYPE_P (t) ? t : TREE_TYPE (t);
669 /* Get the alias set from the expression or type (perhaps using a
670 front-end routine) and then copy bits from the type. */
672 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
673 here, because, in C and C++, the fact that a location is accessed
674 through a const expression does not mean that the value there can
676 set_mem_alias_set (ref, get_alias_set (t));
677 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
678 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
680 /* If we are making an object of this type, we know that it is a scalar if
681 the type is not an aggregate. */
682 if (objectp && ! AGGREGATE_TYPE_P (type))
683 MEM_SCALAR_P (ref) = 1;
685 /* If T is a type, this is all we can do. Otherwise, we may be able
686 to deduce some more information about the expression. */
690 maybe_set_unchanging (ref, t);
691 if (TREE_THIS_VOLATILE (t))
692 MEM_VOLATILE_P (ref) = 1;
694 /* Now see if we can say more about whether it's an aggregate or
695 scalar. If we already know it's an aggregate, don't bother. */
696 if (MEM_IN_STRUCT_P (ref))
699 /* Now remove any NOPs: they don't change what the underlying object is.
700 Likewise for SAVE_EXPR. */
701 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
702 || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
703 t = TREE_OPERAND (t, 0);
705 /* Since we already know the type isn't an aggregate, if this is a decl,
706 it must be a scalar. Or if it is a reference into an aggregate,
707 this is part of an aggregate. Otherwise we don't know. */
709 MEM_SCALAR_P (ref) = 1;
710 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
711 || TREE_CODE (t) == ARRAY_RANGE_REF
712 || TREE_CODE (t) == BIT_FIELD_REF)
713 MEM_IN_STRUCT_P (ref) = 1;
716 /* Return a modified copy of X with its memory address copied
717 into a temporary register to protect it from side effects.
718 If X is not a MEM, it is returned unchanged (and not copied).
719 Perhaps even if it is a MEM, if there is no need to change it. */
726 if (GET_CODE (x) != MEM
727 || ! rtx_unstable_p (XEXP (x, 0)))
731 replace_equiv_address (x, force_reg (Pmode, copy_all_regs (XEXP (x, 0))));
734 /* Copy the value or contents of X to a new temp reg and return that reg. */
740 register rtx temp = gen_reg_rtx (GET_MODE (x));
742 /* If not an operand, must be an address with PLUS and MULT so
743 do the computation. */
744 if (! general_operand (x, VOIDmode))
745 x = force_operand (x, temp);
748 emit_move_insn (temp, x);
753 /* Like copy_to_reg but always give the new register mode Pmode
754 in case X is a constant. */
760 return copy_to_mode_reg (Pmode, x);
763 /* Like copy_to_reg but always give the new register mode MODE
764 in case X is a constant. */
767 copy_to_mode_reg (mode, x)
768 enum machine_mode mode;
771 register rtx temp = gen_reg_rtx (mode);
773 /* If not an operand, must be an address with PLUS and MULT so
774 do the computation. */
775 if (! general_operand (x, VOIDmode))
776 x = force_operand (x, temp);
778 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
781 emit_move_insn (temp, x);
785 /* Load X into a register if it is not already one.
786 Use mode MODE for the register.
787 X should be valid for mode MODE, but it may be a constant which
788 is valid for all integer modes; that's why caller must specify MODE.
790 The caller must not alter the value in the register we return,
791 since we mark it as a "constant" register. */
795 enum machine_mode mode;
798 register rtx temp, insn, set;
800 if (GET_CODE (x) == REG)
803 temp = gen_reg_rtx (mode);
805 if (! general_operand (x, mode))
806 x = force_operand (x, NULL_RTX);
808 insn = emit_move_insn (temp, x);
810 /* Let optimizers know that TEMP's value never changes
811 and that X can be substituted for it. Don't get confused
812 if INSN set something else (such as a SUBREG of TEMP). */
814 && (set = single_set (insn)) != 0
815 && SET_DEST (set) == temp)
817 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
822 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
827 /* If X is a memory ref, copy its contents to a new temp reg and return
828 that reg. Otherwise, return X. */
836 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
839 temp = gen_reg_rtx (GET_MODE (x));
840 emit_move_insn (temp, x);
844 /* Copy X to TARGET (if it's nonzero and a reg)
845 or to a new temp reg and return that reg.
846 MODE is the mode to use for X in case it is a constant. */
849 copy_to_suggested_reg (x, target, mode)
851 enum machine_mode mode;
855 if (target && GET_CODE (target) == REG)
858 temp = gen_reg_rtx (mode);
860 emit_move_insn (temp, x);
864 /* Return the mode to use to store a scalar of TYPE and MODE.
865 PUNSIGNEDP points to the signedness of the type and may be adjusted
866 to show what signedness to use on extension operations.
868 FOR_CALL is non-zero if this call is promoting args for a call. */
871 promote_mode (type, mode, punsignedp, for_call)
873 enum machine_mode mode;
875 int for_call ATTRIBUTE_UNUSED;
877 enum tree_code code = TREE_CODE (type);
878 int unsignedp = *punsignedp;
880 #ifdef PROMOTE_FOR_CALL_ONLY
888 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
889 case CHAR_TYPE: case REAL_TYPE: case OFFSET_TYPE:
890 PROMOTE_MODE (mode, unsignedp, type);
894 #ifdef POINTERS_EXTEND_UNSIGNED
898 unsignedp = POINTERS_EXTEND_UNSIGNED;
906 *punsignedp = unsignedp;
910 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
911 This pops when ADJUST is positive. ADJUST need not be constant. */
914 adjust_stack (adjust)
918 adjust = protect_from_queue (adjust, 0);
920 if (adjust == const0_rtx)
923 /* We expect all variable sized adjustments to be multiple of
924 PREFERRED_STACK_BOUNDARY. */
925 if (GET_CODE (adjust) == CONST_INT)
926 stack_pointer_delta -= INTVAL (adjust);
928 temp = expand_binop (Pmode,
929 #ifdef STACK_GROWS_DOWNWARD
934 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
937 if (temp != stack_pointer_rtx)
938 emit_move_insn (stack_pointer_rtx, temp);
941 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
942 This pushes when ADJUST is positive. ADJUST need not be constant. */
945 anti_adjust_stack (adjust)
949 adjust = protect_from_queue (adjust, 0);
951 if (adjust == const0_rtx)
954 /* We expect all variable sized adjustments to be multiple of
955 PREFERRED_STACK_BOUNDARY. */
956 if (GET_CODE (adjust) == CONST_INT)
957 stack_pointer_delta += INTVAL (adjust);
959 temp = expand_binop (Pmode,
960 #ifdef STACK_GROWS_DOWNWARD
965 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
968 if (temp != stack_pointer_rtx)
969 emit_move_insn (stack_pointer_rtx, temp);
972 /* Round the size of a block to be pushed up to the boundary required
973 by this machine. SIZE is the desired size, which need not be constant. */
979 #ifdef PREFERRED_STACK_BOUNDARY
980 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
983 if (GET_CODE (size) == CONST_INT)
985 int new = (INTVAL (size) + align - 1) / align * align;
986 if (INTVAL (size) != new)
987 size = GEN_INT (new);
991 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
992 but we know it can't. So add ourselves and then do
994 size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
995 NULL_RTX, 1, OPTAB_LIB_WIDEN);
996 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
998 size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
1000 #endif /* PREFERRED_STACK_BOUNDARY */
1004 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
1005 to a previously-created save area. If no save area has been allocated,
1006 this function will allocate one. If a save area is specified, it
1007 must be of the proper mode.
1009 The insns are emitted after insn AFTER, if nonzero, otherwise the insns
1010 are emitted at the current position. */
1013 emit_stack_save (save_level, psave, after)
1014 enum save_level save_level;
1019 /* The default is that we use a move insn and save in a Pmode object. */
1020 rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
1021 enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
1023 /* See if this machine has anything special to do for this kind of save. */
1026 #ifdef HAVE_save_stack_block
1028 if (HAVE_save_stack_block)
1029 fcn = gen_save_stack_block;
1032 #ifdef HAVE_save_stack_function
1034 if (HAVE_save_stack_function)
1035 fcn = gen_save_stack_function;
1038 #ifdef HAVE_save_stack_nonlocal
1040 if (HAVE_save_stack_nonlocal)
1041 fcn = gen_save_stack_nonlocal;
1048 /* If there is no save area and we have to allocate one, do so. Otherwise
1049 verify the save area is the proper mode. */
1053 if (mode != VOIDmode)
1055 if (save_level == SAVE_NONLOCAL)
1056 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1058 *psave = sa = gen_reg_rtx (mode);
1063 if (mode == VOIDmode || GET_MODE (sa) != mode)
1072 /* We must validize inside the sequence, to ensure that any instructions
1073 created by the validize call also get moved to the right place. */
1075 sa = validize_mem (sa);
1076 emit_insn (fcn (sa, stack_pointer_rtx));
1077 seq = gen_sequence ();
1079 emit_insn_after (seq, after);
1084 sa = validize_mem (sa);
1085 emit_insn (fcn (sa, stack_pointer_rtx));
1089 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1090 area made by emit_stack_save. If it is zero, we have nothing to do.
1092 Put any emitted insns after insn AFTER, if nonzero, otherwise at
1093 current position. */
1096 emit_stack_restore (save_level, sa, after)
1097 enum save_level save_level;
1101 /* The default is that we use a move insn. */
1102 rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
1104 /* See if this machine has anything special to do for this kind of save. */
1107 #ifdef HAVE_restore_stack_block
1109 if (HAVE_restore_stack_block)
1110 fcn = gen_restore_stack_block;
1113 #ifdef HAVE_restore_stack_function
1115 if (HAVE_restore_stack_function)
1116 fcn = gen_restore_stack_function;
1119 #ifdef HAVE_restore_stack_nonlocal
1121 if (HAVE_restore_stack_nonlocal)
1122 fcn = gen_restore_stack_nonlocal;
1130 sa = validize_mem (sa);
1137 emit_insn (fcn (stack_pointer_rtx, sa));
1138 seq = gen_sequence ();
1140 emit_insn_after (seq, after);
1143 emit_insn (fcn (stack_pointer_rtx, sa));
1146 #ifdef SETJMP_VIA_SAVE_AREA
1147 /* Optimize RTL generated by allocate_dynamic_stack_space for targets
1148 where SETJMP_VIA_SAVE_AREA is true. The problem is that on these
1149 platforms, the dynamic stack space used can corrupt the original
1150 frame, thus causing a crash if a longjmp unwinds to it. */
1153 optimize_save_area_alloca (insns)
1158 for (insn = insns; insn; insn = NEXT_INSN(insn))
1162 if (GET_CODE (insn) != INSN)
1165 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1167 if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
1170 if (!current_function_calls_setjmp)
1172 rtx pat = PATTERN (insn);
1174 /* If we do not see the note in a pattern matching
1175 these precise characteristics, we did something
1176 entirely wrong in allocate_dynamic_stack_space.
1178 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
1179 was defined on a machine where stacks grow towards higher
1182 Right now only supported port with stack that grow upward
1183 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA. */
1184 if (GET_CODE (pat) != SET
1185 || SET_DEST (pat) != stack_pointer_rtx
1186 || GET_CODE (SET_SRC (pat)) != MINUS
1187 || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
1190 /* This will now be transformed into a (set REG REG)
1191 so we can just blow away all the other notes. */
1192 XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
1193 REG_NOTES (insn) = NULL_RTX;
1197 /* setjmp was called, we must remove the REG_SAVE_AREA
1198 note so that later passes do not get confused by its
1200 if (note == REG_NOTES (insn))
1202 REG_NOTES (insn) = XEXP (note, 1);
1208 for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
1209 if (XEXP (srch, 1) == note)
1212 if (srch == NULL_RTX)
1215 XEXP (srch, 1) = XEXP (note, 1);
1218 /* Once we've seen the note of interest, we need not look at
1219 the rest of them. */
1224 #endif /* SETJMP_VIA_SAVE_AREA */
1226 /* Return an rtx representing the address of an area of memory dynamically
1227 pushed on the stack. This region of memory is always aligned to
1228 a multiple of BIGGEST_ALIGNMENT.
1230 Any required stack pointer alignment is preserved.
1232 SIZE is an rtx representing the size of the area.
1233 TARGET is a place in which the address can be placed.
1235 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
1238 allocate_dynamic_stack_space (size, target, known_align)
1243 #ifdef SETJMP_VIA_SAVE_AREA
1244 rtx setjmpless_size = NULL_RTX;
1247 /* If we're asking for zero bytes, it doesn't matter what we point
1248 to since we can't dereference it. But return a reasonable
1250 if (size == const0_rtx)
1251 return virtual_stack_dynamic_rtx;
1253 /* Otherwise, show we're calling alloca or equivalent. */
1254 current_function_calls_alloca = 1;
1256 /* Ensure the size is in the proper mode. */
1257 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1258 size = convert_to_mode (Pmode, size, 1);
1260 /* We can't attempt to minimize alignment necessary, because we don't
1261 know the final value of preferred_stack_boundary yet while executing
1263 #ifdef PREFERRED_STACK_BOUNDARY
1264 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1267 /* We will need to ensure that the address we return is aligned to
1268 BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
1269 always know its final value at this point in the compilation (it
1270 might depend on the size of the outgoing parameter lists, for
1271 example), so we must align the value to be returned in that case.
1272 (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
1273 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1274 We must also do an alignment operation on the returned value if
1275 the stack pointer alignment is less strict that BIGGEST_ALIGNMENT.
1277 If we have to align, we must leave space in SIZE for the hole
1278 that might result from the alignment operation. */
1280 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
1281 #define MUST_ALIGN 1
1283 #define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
1288 = force_operand (plus_constant (size,
1289 BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1292 #ifdef SETJMP_VIA_SAVE_AREA
1293 /* If setjmp restores regs from a save area in the stack frame,
1294 avoid clobbering the reg save area. Note that the offset of
1295 virtual_incoming_args_rtx includes the preallocated stack args space.
1296 It would be no problem to clobber that, but it's on the wrong side
1297 of the old save area. */
1300 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
1301 stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
1303 if (!current_function_calls_setjmp)
1305 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1307 /* See optimize_save_area_alloca to understand what is being
1310 #if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
1311 /* If anyone creates a target with these characteristics, let them
1312 know that our optimization cannot work correctly in such a case. */
1316 if (GET_CODE (size) == CONST_INT)
1318 HOST_WIDE_INT new = INTVAL (size) / align * align;
1320 if (INTVAL (size) != new)
1321 setjmpless_size = GEN_INT (new);
1323 setjmpless_size = size;
1327 /* Since we know overflow is not possible, we avoid using
1328 CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead. */
1329 setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
1330 GEN_INT (align), NULL_RTX, 1);
1331 setjmpless_size = expand_mult (Pmode, setjmpless_size,
1332 GEN_INT (align), NULL_RTX, 1);
1334 /* Our optimization works based upon being able to perform a simple
1335 transformation of this RTL into a (set REG REG) so make sure things
1336 did in fact end up in a REG. */
1337 if (!register_operand (setjmpless_size, Pmode))
1338 setjmpless_size = force_reg (Pmode, setjmpless_size);
1341 size = expand_binop (Pmode, add_optab, size, dynamic_offset,
1342 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1344 #endif /* SETJMP_VIA_SAVE_AREA */
1346 /* Round the size to a multiple of the required stack alignment.
1347 Since the stack is presumed to be rounded before this allocation,
1348 this will maintain the required alignment.
1350 If the stack grows downward, we could save an insn by subtracting
1351 SIZE from the stack pointer and then aligning the stack pointer.
1352 The problem with this is that the stack pointer may be unaligned
1353 between the execution of the subtraction and alignment insns and
1354 some machines do not allow this. Even on those that do, some
1355 signal handlers malfunction if a signal should occur between those
1356 insns. Since this is an extremely rare event, we have no reliable
1357 way of knowing which systems have this problem. So we avoid even
1358 momentarily mis-aligning the stack. */
1360 #ifdef PREFERRED_STACK_BOUNDARY
1361 /* If we added a variable amount to SIZE,
1362 we can no longer assume it is aligned. */
1363 #if !defined (SETJMP_VIA_SAVE_AREA)
1364 if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
1366 size = round_push (size);
1369 do_pending_stack_adjust ();
1371 /* We ought always to be called at the toplevel, and the stack ought to be aligned
1373 #ifdef PREFERRED_STACK_BOUNDARY
1374 if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
1378 /* If needed, check that we have the required amount of stack. Take into
1379 account what has already been checked. */
1380 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1381 probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
1383 /* Don't use a TARGET that isn't a pseudo or is the wrong mode. */
1384 if (target == 0 || GET_CODE (target) != REG
1385 || REGNO (target) < FIRST_PSEUDO_REGISTER
1386 || GET_MODE (target) != Pmode)
1387 target = gen_reg_rtx (Pmode);
1389 mark_reg_pointer (target, known_align);
1391 /* Perform the required allocation from the stack. Some systems do
1392 this differently than simply incrementing/decrementing from the
1393 stack pointer, such as acquiring the space by calling malloc(). */
1394 #ifdef HAVE_allocate_stack
1395 if (HAVE_allocate_stack)
1397 enum machine_mode mode = STACK_SIZE_MODE;
1398 insn_operand_predicate_fn pred;
1400 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[0].predicate;
1401 if (pred && ! ((*pred) (target, Pmode)))
1402 #ifdef POINTERS_EXTEND_UNSIGNED
1403 target = convert_memory_address (Pmode, target);
1405 target = copy_to_mode_reg (Pmode, target);
1408 if (mode == VOIDmode)
1411 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
1412 if (pred && ! ((*pred) (size, mode)))
1413 size = copy_to_mode_reg (mode, size);
1415 emit_insn (gen_allocate_stack (target, size));
1420 #ifndef STACK_GROWS_DOWNWARD
1421 emit_move_insn (target, virtual_stack_dynamic_rtx);
1424 /* Check stack bounds if necessary. */
1425 if (current_function_limit_stack)
1428 rtx space_available = gen_label_rtx ();
1429 #ifdef STACK_GROWS_DOWNWARD
1430 available = expand_binop (Pmode, sub_optab,
1431 stack_pointer_rtx, stack_limit_rtx,
1432 NULL_RTX, 1, OPTAB_WIDEN);
1434 available = expand_binop (Pmode, sub_optab,
1435 stack_limit_rtx, stack_pointer_rtx,
1436 NULL_RTX, 1, OPTAB_WIDEN);
1438 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1439 0, space_available);
1442 emit_insn (gen_trap ());
1445 error ("stack limits not supported on this target");
1447 emit_label (space_available);
1450 anti_adjust_stack (size);
1451 #ifdef SETJMP_VIA_SAVE_AREA
1452 if (setjmpless_size != NULL_RTX)
1454 rtx note_target = get_last_insn ();
1456 REG_NOTES (note_target)
1457 = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
1458 REG_NOTES (note_target));
1460 #endif /* SETJMP_VIA_SAVE_AREA */
1462 #ifdef STACK_GROWS_DOWNWARD
1463 emit_move_insn (target, virtual_stack_dynamic_rtx);
1469 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1470 but we know it can't. So add ourselves and then do
1472 target = expand_binop (Pmode, add_optab, target,
1473 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1474 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1475 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1476 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1478 target = expand_mult (Pmode, target,
1479 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1483 /* Some systems require a particular insn to refer to the stack
1484 to make the pages exist. */
1487 emit_insn (gen_probe ());
1490 /* Record the new stack level for nonlocal gotos. */
1491 if (nonlocal_goto_handler_slots != 0)
1492 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
1497 /* A front end may want to override GCC's stack checking by providing a
1498 run-time routine to call to check the stack, so provide a mechanism for
1499 calling that routine. */
1501 static rtx stack_check_libfunc;
1504 set_stack_check_libfunc (libfunc)
/* LIBFUNC: the run-time routine that probe_stack_range will invoke
   (via emit_library_call, see below) to validate a stack address.  */
1507   stack_check_libfunc = libfunc;
1510 /* Emit one stack probe at ADDRESS, an address within the stack. */
1513 emit_stack_probe (address)
/* Build a word-sized memory reference to ADDRESS, the stack location
   being probed.  */
1516 rtx memref = gen_rtx_MEM (word_mode, address);
/* Mark the reference volatile so later optimization passes cannot
   delete or reorder the probe.  */
1518 MEM_VOLATILE_P (memref) = 1;
/* The target decides whether a probe is performed as a load or as a
   store.  */
1520 if (STACK_CHECK_PROBE_LOAD)
/* Probe by loading the word into a fresh pseudo register.  */
1521 emit_move_insn (gen_reg_rtx (word_mode), memref);
/* Otherwise probe by storing zero into the word.  */
1523 emit_move_insn (memref, const0_rtx);
1526 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1527 FIRST is a constant and size is a Pmode RTX. These are offsets from the
1528 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1529 subtract from the stack. If SIZE is constant, this is done
1530 with a fixed number of probes. Otherwise, we must make a loop. */
1532 #ifdef STACK_GROWS_DOWNWARD
1533 #define STACK_GROW_OP MINUS
1535 #define STACK_GROW_OP PLUS
1539 probe_stack_range (first, size)
1540 HOST_WIDE_INT first;
/* Strategy 1: if the front end registered a stack-checking routine
   (via set_stack_check_libfunc above), call it with the farthest
   address to be probed.  */
1543 /* First see if the front end has set up a function for us to call to
1545 if (stack_check_libfunc != 0)
/* ADDR is stack_pointer advanced by FIRST + SIZE in the direction of
   stack growth, i.e. the far end of the region being validated.  */
1547 rtx addr = memory_address (QImode,
1548 gen_rtx (STACK_GROW_OP, Pmode,
1550 plus_constant (size, first)));
1552 #ifdef POINTERS_EXTEND_UNSIGNED
/* The library routine takes a ptr_mode argument; convert the Pmode
   address if those modes differ.  */
1553 if (GET_MODE (addr) != ptr_mode)
1554 addr = convert_memory_address (ptr_mode, addr);
1557 emit_library_call (stack_check_libfunc, 0, VOIDmode, 1, addr,
/* Strategy 2: let the target's check_stack insn do the work.  */
1561 /* Next see if we have an insn to check the stack. Use it if so. */
1562 #ifdef HAVE_check_stack
1563 else if (HAVE_check_stack)
1565 insn_operand_predicate_fn pred;
1567 = force_operand (gen_rtx_STACK_GROW_OP (Pmode,
1569 plus_constant (size, first)),
/* Honor the insn's operand predicate: copy LAST_ADDR into a register
   if the predicate rejects it as-is.  */
1572 pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
1573 if (pred && ! ((*pred) (last_addr, Pmode)))
1574 last_addr = copy_to_mode_reg (Pmode, last_addr);
1576 emit_insn (gen_check_stack (last_addr));
/* Strategy 3: SIZE is a small enough constant, so emit a short
   unrolled sequence of explicit probes.  */
1580 /* If we have to generate explicit probes, see if we have a constant
1581 small number of them to generate. If so, that's the easy case. */
1582 else if (GET_CODE (size) == CONST_INT
1583 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1585 HOST_WIDE_INT offset;
1587 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1588 for values of N from 1 until it exceeds LAST. If only one
1589 probe is needed, this will not generate any code. Then probe
1591 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1592 offset < INTVAL (size);
1593 offset = offset + STACK_CHECK_PROBE_INTERVAL)
1594 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
/* Finish with one probe at the very end of the region, FIRST + SIZE.  */
1598 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1600 plus_constant (size, first)));
/* Strategy 4: variable SIZE — emit an explicit probe loop, bracketed
   with loop notes for the loop optimizer.  */
1603 /* In the variable case, do the same as above, but in a loop. We emit loop
1604 notes so that loop optimization can be done. */
/* TEST_ADDR starts one probe interval past FIRST ...  */
1608 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1610 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
/* ... and LAST_ADDR is the far end of the region, FIRST + SIZE.  */
1613 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1615 plus_constant (size, first)),
1617 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1618 rtx loop_lab = gen_label_rtx ();
1619 rtx test_lab = gen_label_rtx ();
1620 rtx end_lab = gen_label_rtx ();
/* The loop updates TEST_ADDR in place (expand_binop below targets
   it), so it must live in a pseudo register.  */
1623 if (GET_CODE (test_addr) != REG
1624 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1625 test_addr = force_reg (Pmode, test_addr);
1627 emit_note (NULL, NOTE_INSN_LOOP_BEG);
/* Jump to the exit test first; the body then runs zero or more times.  */
1628 emit_jump (test_lab);
1630 emit_label (loop_lab);
1631 emit_stack_probe (test_addr);
1633 emit_note (NULL, NOTE_INSN_LOOP_CONT);
/* Advance TEST_ADDR one interval in the direction of stack growth;
   CMP_OPCODE is inverted to match so the loop test below works for
   either growth direction.  */
1635 #ifdef STACK_GROWS_DOWNWARD
1636 #define CMP_OPCODE GTU
1637 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1640 #define CMP_OPCODE LTU
1641 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
/* expand_binop was told to use TEST_ADDR as its target, so any other
   result is unexpected here.  */
1645 if (temp != test_addr)
1648 emit_label (test_lab);
/* Keep looping while TEST_ADDR has not yet passed LAST_ADDR.  */
1649 emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1650 NULL_RTX, Pmode, 1, 0, loop_lab);
1651 emit_jump (end_lab);
1652 emit_note (NULL, NOTE_INSN_LOOP_END);
1653 emit_label (end_lab);
/* Probe the final address, which the loop's exit condition skipped.  */
1655 emit_stack_probe (last_addr);
1659 /* Return an rtx representing the register or memory location
1660 in which a scalar value of data type VALTYPE
1661 was returned by a function call to function FUNC.
1662 FUNC is a FUNCTION_DECL node if the precise function is known,
1664 OUTGOING is 1 if on a machine with register windows this function
1665 should return the register in which the function will put its result
1669 hard_function_value (valtype, func, outgoing)
1671 tree func ATTRIBUTE_UNUSED;
1672 int outgoing ATTRIBUTE_UNUSED;
/* On targets that define FUNCTION_OUTGOING_VALUE, it gives the
   location as seen by the returning function; otherwise fall back to
   the single FUNCTION_VALUE macro.  */
1676 #ifdef FUNCTION_OUTGOING_VALUE
1678 val = FUNCTION_OUTGOING_VALUE (valtype, func);
1681 val = FUNCTION_VALUE (valtype, func);
/* The target may hand back a register in BLKmode; re-mode it with the
   narrowest integer mode wide enough to hold the value.  */
1683 if (GET_CODE (val) == REG
1684 && GET_MODE (val) == BLKmode)
/* NOTE(review): int_size_in_bytes can return -1 for variable-size
   types; presumably no integer mode matches then — confirm how the
   no-match case is handled in the full source.  */
1686 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1687 enum machine_mode tmpmode;
/* Walk the integer modes from narrowest to widest.  */
1689 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1690 tmpmode != VOIDmode;
1691 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1693 /* Have we found a large enough mode? */
1694 if (GET_MODE_SIZE (tmpmode) >= bytes)
1698 /* No suitable mode found. */
1699 if (tmpmode == VOIDmode)
/* Replace BLKmode with the chosen integer mode in place.  */
1702 PUT_MODE (val, tmpmode);
1707 /* Return an rtx representing the register or memory location
1708 in which a scalar value of mode MODE was returned by a library call. */
1711 hard_libcall_value (mode)
1712 enum machine_mode mode;
/* Defer entirely to the target macro, which knows where a libcall
   result of MODE is returned.  */
1714 return LIBCALL_VALUE (mode);
1717 /* Look up the tree code for a given rtx code
1718 to provide the arithmetic operation for REAL_ARITHMETIC.
1719 The function returns an int because the caller may not know
1720 what `enum tree_code' means. */
1723 rtx_to_tree_code (code)
1726 enum tree_code tcode;
/* Fall back to LAST_AND_UNUSED_TREE_CODE when CODE has no tree
   counterpart.  */
1749 tcode = LAST_AND_UNUSED_TREE_CODE;
/* Returned as int because callers may not know `enum tree_code'
   (see the comment above this function).  */
1752 return ((int) tcode);