1 /* Pass computing data for optimizing stdarg functions.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
24 #include "coretypes.h"
28 #include "langhooks.h"
29 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-pass.h"
33 #include "tree-stdarg.h"
35 /* A simple pass that attempts to optimize stdarg functions on architectures
36 that need to save register arguments to stack on entry to stdarg functions.
37 If the function doesn't use any va_start macros, no registers need to
38 be saved. If va_start macros are used, the va_list variables don't escape
39 the function, it is only necessary to save registers that will be used
40 in va_arg macros. E.g. if va_arg is only used with integral types
41 in the function, floating point registers don't need to be saved, etc. */
44 /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
45 is executed at most as many times as VA_START_BB. */
/* NOTE(review): this numbered listing is elided -- the function's return
   type line, local declarations, several return statements and closing
   braces are not visible here.  Comments below annotate only the visible
   statements.  */
48 reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
/* Same block: trivially executed exactly as often as the va_start.  */
56 if (va_arg_bb == va_start_bb)
/* If VA_START_BB does not dominate VA_ARG_BB, some path reaches the
   va_arg block without going through the va_start block first.  */
59 if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
/* Worklist of edges for a backward walk over predecessors; +1 leaves
   room for a sentinel/top slot.  */
62 stack = XNEWVEC (edge, n_basic_blocks + 1)
65 visited = sbitmap_alloc (last_basic_block);
66 sbitmap_zero (visited);
/* Seed the backward walk with every predecessor edge of VA_ARG_BB.  */
69 FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
/* Abnormal/EH edges make execution counts unpredictable; give up.  */
80 if (e->flags & EDGE_COMPLEX)
86 if (src == va_start_bb)
89 /* va_arg_bb can be executed more times than va_start_bb. */
/* Since VA_START_BB dominates VA_ARG_BB, every backward path must hit
   it before reaching the entry block.  */
96 gcc_assert (src != ENTRY_BLOCK_PTR);
/* Visit each block once; push its predecessors for further walking.  */
98 if (! TEST_BIT (visited, src->index))
100 SET_BIT (visited, src->index);
101 FOR_EACH_EDGE (e, ei, src->preds)
107 sbitmap_free (visited);
112 /* For statement COUNTER = RHS, if RHS is COUNTER + constant,
113 return constant, otherwise return (unsigned HOST_WIDE_INT) -1.
114 GPR_P is true if this is GPR counter. */
/* NOTE(review): elided listing -- declarations (e.g. the loop index I,
   the GPR_P parameter line), braces and several statements are not
   visible in this excerpt.  */
116 static unsigned HOST_WIDE_INT
117 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
120 tree stmt, lhs, orig_lhs;
121 unsigned HOST_WIDE_INT ret = 0, val, counter_val;
122 unsigned int max_size;
/* Lazily allocate the per-SSA-name offset cache on first use.  */
124 if (si->offsets == NULL)
128 si->offsets = XNEWVEC (int, num_ssa_names);
129 for (i = 0; i < num_ssa_names; ++i)
/* Pick the running size and its cap for the requested register class.  */
133 counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
134 max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
135 orig_lhs = lhs = rhs;
/* Phase 1: walk the SSA def chain backwards from RHS, accumulating the
   constant increments, until COUNTER (or a cached offset) is found.  */
/* A cached offset lets us stop early and reuse a previous walk.  */
138 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
140 if (counter_val >= max_size)
146 ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
150 stmt = SSA_NAME_DEF_STMT (lhs);
/* The chain must consist solely of simple assignments to LHS.  */
152 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
153 || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
154 return (unsigned HOST_WIDE_INT) -1;
156 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
157 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
158 rhs = TREE_OPERAND (rhs, 0);
/* Plain SSA copy: keep walking.  */
160 if (TREE_CODE (rhs) == SSA_NAME)
/* Look through no-op conversions of SSA names.  */
166 if ((TREE_CODE (rhs) == NOP_EXPR
167 || TREE_CODE (rhs) == CONVERT_EXPR)
168 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
170 lhs = TREE_OPERAND (rhs, 0);
/* X + CST (pointer or integer form): accumulate the constant bump.  */
174 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
175 || TREE_CODE (rhs) == PLUS_EXPR)
176 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
177 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
178 && host_integerp (TREE_OPERAND (rhs, 1), 1))
180 ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
181 lhs = TREE_OPERAND (rhs, 0);
/* End of chain: RHS must now be the counter itself.  */
185 if (TREE_CODE (counter) != TREE_CODE (rhs))
186 return (unsigned HOST_WIDE_INT) -1;
/* For struct va_list counters, compare base object and field.  */
188 if (TREE_CODE (counter) == COMPONENT_REF)
190 if (get_base_address (counter) != get_base_address (rhs)
191 || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
192 || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
193 return (unsigned HOST_WIDE_INT) -1;
195 else if (counter != rhs)
196 return (unsigned HOST_WIDE_INT) -1;
/* Phase 2: re-walk the same chain recording each name's absolute
   offset from the counter into si->offsets, saturating at MAX_SIZE.  */
202 val = ret + counter_val;
205 if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
209 si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
211 si->offsets[SSA_NAME_VERSION (lhs)] = val;
213 stmt = SSA_NAME_DEF_STMT (lhs);
215 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
216 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
217 rhs = TREE_OPERAND (rhs, 0);
219 if (TREE_CODE (rhs) == SSA_NAME)
225 if ((TREE_CODE (rhs) == NOP_EXPR
226 || TREE_CODE (rhs) == CONVERT_EXPR)
227 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
229 lhs = TREE_OPERAND (rhs, 0);
/* Undo the increments while walking back so VAL stays the offset that
   is valid at each intermediate SSA name.  */
233 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
234 || TREE_CODE (rhs) == PLUS_EXPR)
235 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
236 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
237 && host_integerp (TREE_OPERAND (rhs, 1), 1))
239 val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
240 lhs = TREE_OPERAND (rhs, 0);
251 /* Called by walk_tree to look for references to va_list variables. */
/* NOTE(review): elided listing -- the return type, the VAR declaration
   and the return statements of this callback are not visible here.  */
254 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
/* DATA is the bitmap of DECL_UIDs of tracked va_list variables.  */
257 bitmap va_list_vars = (bitmap) data;
/* Strip an SSA name down to its underlying declaration.  */
260 if (TREE_CODE (var) == SSA_NAME)
261 var = SSA_NAME_VAR (var);
/* Report a hit when the decl is one of the tracked va_list vars.  */
263 if (TREE_CODE (var) == VAR_DECL
264 && bitmap_bit_p (va_list_vars, DECL_UID (var)))
271 /* Helper function of va_list_counter_struct_op. Compute
272 cfun->va_list_{g,f}pr_size. AP is a va_list GPR/FPR counter,
273 if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
274 statement. GPR_P is true if AP is a GPR counter, false if it is
/* NOTE(review): elided listing -- the tail of the comment, the return
   type, braces and some statements are not visible in this excerpt.  */
278 va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
281 unsigned HOST_WIDE_INT increment;
/* Compute si->compute_sizes lazily: it is 1 only when this bb executes
   at most once per va_start (single va_start, dominance-checked).  */
283 if (si->compute_sizes < 0)
285 si->compute_sizes = 0;
286 if (si->va_start_count == 1
287 && reachable_at_most_once (si->bb, si->va_start_bb))
288 si->compute_sizes = 1;
290 if (dump_file && (dump_flags & TDF_DETAILS))
292 "bb%d will %sbe executed at most once for each va_start "
293 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
294 si->va_start_bb->index);
/* The "+ 1 > 1" test rejects both 0 and (unsigned HOST_WIDE_INT) -1
   (the failure value of va_list_counter_bump) in one comparison.  */
299 && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
/* Bump the appropriate size, saturating at the architecture maximum
   (saturation branches appear to be elided from this listing).  */
301 if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
303 cfun->va_list_gpr_size += increment;
307 if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
309 cfun->va_list_fpr_size += increment;
/* Writes we cannot analyze, or blocks that may run multiple times,
   force the conservative maximum.  */
314 if (write_p || !si->compute_sizes)
317 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
319 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
324 /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
325 If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
326 is false, AP has been seen in VAR = AP assignment.
327 Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
328 va_arg operation that doesn't cause the va_list variable to escape
/* NOTE(review): elided listing -- the return type, the BASE declaration,
   return statements and braces are not visible here.  */
332 va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
/* AP must be a field access (ap.gp_offset / ap.fp_offset style).  */
337 if (TREE_CODE (ap) != COMPONENT_REF
338 || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
/* VAR must be an SSA temporary that is not itself a tracked va_list.  */
341 if (TREE_CODE (var) != SSA_NAME
342 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (var))))
/* The base object of the field access must be a tracked va_list.  */
345 base = get_base_address (ap);
346 if (TREE_CODE (base) != VAR_DECL
347 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base)))
/* Dispatch on which counter field the access names; the target hook
   provides va_list_{gpr,fpr}_counter_field.  */
350 if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
351 va_list_counter_op (si, ap, var, true, write_p);
352 else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
353 va_list_counter_op (si, ap, var, false, write_p);
359 /* Check for TEM = AP. Return true if found and the caller shouldn't
360 search for va_list references in the statement. */
/* NOTE(review): elided listing -- the return type line, return
   statements and braces of this function are not visible here.  */
363 va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
/* AP must be one of the tracked va_list variables.  */
365 if (TREE_CODE (ap) != VAR_DECL
366 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
/* TEM must be a function-local SSA temporary outside the tracked set.  */
369 if (TREE_CODE (tem) != SSA_NAME
370 || bitmap_bit_p (si->va_list_vars,
371 DECL_UID (SSA_NAME_VAR (tem)))
372 || is_global_var (SSA_NAME_VAR (tem)))
/* Lazy computation of si->compute_sizes, same scheme as in
   va_list_counter_op.  */
375 if (si->compute_sizes < 0)
377 si->compute_sizes = 0;
378 if (si->va_start_count == 1
379 && reachable_at_most_once (si->bb, si->va_start_bb))
380 si->compute_sizes = 1;
382 if (dump_file && (dump_flags & TDF_DETAILS))
384 "bb%d will %sbe executed at most once for each va_start "
385 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
386 si->va_start_bb->index);
389 /* For void * or char * va_list types, there is just one counter.
390 If va_arg is used in a loop, we don't know how many registers need
392 if (! si->compute_sizes)
/* The read must be expressible as counter + constant for tracking.  */
395 if (va_list_counter_bump (si, ap, tem, true) == (unsigned HOST_WIDE_INT) -1)
398 /* Note the temporary, as we need to track whether it doesn't escape
399 the current function. */
400 bitmap_set_bit (si->va_list_escape_vars,
401 DECL_UID (SSA_NAME_VAR (tem)));
/* NOTE(review): elided listing -- the first line(s) of this comment, the
   return type, return statements and braces are not visible here.  */
410 sequence and update cfun->va_list_gpr_size. Return true if found. */
413 va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
415 unsigned HOST_WIDE_INT increment;
/* AP must be a tracked va_list variable.  */
417 if (TREE_CODE (ap) != VAR_DECL
418 || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap)))
/* TEM2 must be an SSA temporary outside the tracked set.  */
421 if (TREE_CODE (tem2) != SSA_NAME
422 || bitmap_bit_p (si->va_list_vars, DECL_UID (SSA_NAME_VAR (tem2))))
/* Without the at-most-once guarantee no precise size can be computed.  */
425 if (si->compute_sizes <= 0)
428 increment = va_list_counter_bump (si, ap, tem2, true);
/* "increment + 1 <= 1" rejects 0 and the -1 failure value together.  */
429 if (increment + 1 <= 1)
/* Accumulate into va_list_gpr_size, saturating at the maximum.  */
432 if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
433 cfun->va_list_gpr_size += increment;
435 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
441 /* If RHS is X, (some type *) X or X + CST for X a temporary variable
442 containing value of some va_list variable plus optionally some constant,
443 either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
444 depending whether LHS is a function local temporary. */
/* NOTE(review): elided listing -- the return type, braces and some
   return statements are not visible in this excerpt.  */
447 check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
/* Only pointer-valued RHS can carry a va_list pointer around.  */
449 if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
/* Strip X + CST and no-op conversions down to the underlying X.  */
452 if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
453 || TREE_CODE (rhs) == PLUS_EXPR)
454 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
455 || TREE_CODE (rhs) == NOP_EXPR
456 || TREE_CODE (rhs) == CONVERT_EXPR)
457 rhs = TREE_OPERAND (rhs, 0);
/* Only interesting if X is one of the tracked escape temporaries.  */
459 if (TREE_CODE (rhs) != SSA_NAME
460 || ! bitmap_bit_p (si->va_list_escape_vars,
461 DECL_UID (SSA_NAME_VAR (rhs))))
/* Storing into a non-SSA or global LHS lets the va_list escape.  */
464 if (TREE_CODE (lhs) != SSA_NAME || is_global_var (SSA_NAME_VAR (lhs)))
466 si->va_list_escapes = true;
/* Lazy computation of si->compute_sizes, as in va_list_counter_op.  */
470 if (si->compute_sizes < 0)
472 si->compute_sizes = 0;
473 if (si->va_start_count == 1
474 && reachable_at_most_once (si->bb, si->va_start_bb))
475 si->compute_sizes = 1;
477 if (dump_file && (dump_flags & TDF_DETAILS))
479 "bb%d will %sbe executed at most once for each va_start "
480 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
481 si->va_start_bb->index);
484 /* For void * or char * va_list types, there is just one counter.
485 If va_arg is used in a loop, we don't know how many registers need
487 if (! si->compute_sizes)
489 si->va_list_escapes = true;
/* LHS must be counter + constant relative to the va_start'ed ap,
   otherwise treat the va_list as escaping.  */
493 if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
494 == (unsigned HOST_WIDE_INT) -1)
496 si->va_list_escapes = true;
/* LHS is a new local temporary carrying the va_list value: track it.  */
500 bitmap_set_bit (si->va_list_escape_vars,
501 DECL_UID (SSA_NAME_VAR (lhs)));
505 /* Check all uses of temporaries from si->va_list_escape_vars bitmap.
506 Return true if va_list might be escaping. */
/* NOTE(review): elided listing -- the return type, the FOR_EACH_BB loop
   header, several declarations, continues/returns and braces are not
   visible in this excerpt.  */
509 check_all_va_list_escapes (struct stdarg_info *si)
515 block_stmt_iterator i;
/* Scan every statement of (presumably) every basic block.  */
517 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
519 tree stmt = bsi_stmt (i), use;
/* Examine each SSA use; only tracked escape temporaries matter.  */
522 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
524 if (! bitmap_bit_p (si->va_list_escape_vars,
525 DECL_UID (SSA_NAME_VAR (use))))
528 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
530 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
531 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
533 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
534 rhs = TREE_OPERAND (rhs, 0);
/* A dereference *use with a known access size and a known offset of
   USE from the counter is a va_arg load -- grow va_list_gpr_size by
   offset + access size, capped at the maximum.  */
537 if (TREE_CODE (rhs) == INDIRECT_REF
538 && TREE_OPERAND (rhs, 0) == use
539 && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
540 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
541 && si->offsets[SSA_NAME_VERSION (use)] != -1)
543 unsigned HOST_WIDE_INT gpr_size;
544 tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
546 gpr_size = si->offsets[SSA_NAME_VERSION (use)]
547 + tree_low_cst (access_size, 1);
548 if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
549 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
550 else if (gpr_size > cfun->va_list_gpr_size)
551 cfun->va_list_gpr_size = gpr_size;
555 /* va_arg sequences may contain
556 other_ap_temp = ap_temp;
557 other_ap_temp = ap_temp + constant;
558 other_ap_temp = (some_type *) ap_temp;
/* Strip the copy/convert/plus-constant forms listed above.  */
561 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
562 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
563 || TREE_CODE (rhs) == NOP_EXPR
564 || TREE_CODE (rhs) == CONVERT_EXPR)
565 rhs = TREE_OPERAND (rhs, 0);
/* Assigning the value to another tracked temporary is harmless.  */
569 if (TREE_CODE (lhs) == SSA_NAME
570 && bitmap_bit_p (si->va_list_escape_vars,
571 DECL_UID (SSA_NAME_VAR (lhs))))
/* Storing back into a tracked va_list variable is also recognized
   (the body of this branch is elided from this listing).  */
574 if (TREE_CODE (lhs) == VAR_DECL
575 && bitmap_bit_p (si->va_list_vars,
/* Anything else is a genuine escape; dump it and report failure.  */
581 if (dump_file && (dump_flags & TDF_DETAILS))
583 fputs ("va_list escapes in ", dump_file);
584 print_generic_expr (dump_file, stmt, dump_flags);
585 fputc ('\n', dump_file);
596 /* Return true if this optimization pass should be done.
597 It makes only sense for stdarg functions. */
/* NOTE(review): elided listing -- the return type line and braces of
   this gate function are not visible here.  */
600 gate_optimize_stdarg (void)
602 /* This optimization is only for stdarg functions. */
603 return current_function_stdarg != 0;
607 /* Entry point to the stdarg optimization pass. */
/* NOTE(review): elided listing -- the return type, FOR_EACH_BB loop
   headers, many braces, `continue;` / `goto finish;` style statements
   and some declarations are not visible in this excerpt.  Comments
   below annotate only the visible statements.  */
610 execute_optimize_stdarg (void)
613 bool va_list_escapes = false;
614 bool va_list_simple_ptr;
615 struct stdarg_info si;
616 const char *funcname = NULL;
/* Start from "no registers need saving" and grow the sizes as va_arg
   uses are discovered; escape forces the maximum at the end.  */
618 cfun->va_list_gpr_size = 0;
619 cfun->va_list_fpr_size = 0;
620 memset (&si, 0, sizeof (si));
621 si.va_list_vars = BITMAP_ALLOC (NULL);
622 si.va_list_escape_vars = BITMAP_ALLOC (NULL);
625 funcname = lang_hooks.decl_printable_name (current_function_decl, 2);
/* "Simple" va_list: plain void* / char*, which is a single counter.  */
627 va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
628 && (TREE_TYPE (va_list_type_node) == void_type_node
629 || TREE_TYPE (va_list_type_node) == char_type_node);
630 gcc_assert (is_gimple_reg_type (va_list_type_node) == va_list_simple_ptr);
/* Pass 1: find all va_start calls and collect their va_list vars.  */
634 block_stmt_iterator i;
636 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
638 tree stmt = bsi_stmt (i);
639 tree call = get_call_expr_in (stmt), callee;
645 callee = get_callee_fndecl (call);
647 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
650 switch (DECL_FUNCTION_CODE (callee))
652 case BUILT_IN_VA_START:
654 /* If old style builtins are used, don't optimize anything. */
655 case BUILT_IN_SAVEREGS:
656 case BUILT_IN_STDARG_START:
657 case BUILT_IN_ARGS_INFO:
658 case BUILT_IN_NEXT_ARG:
659 va_list_escapes = true;
/* Peel &ap (and &ap[0] for array-typed va_list) down to the VAR_DECL;
   anything unexpected is treated as an escape.  */
666 ap = CALL_EXPR_ARG (call, 0);
668 if (TREE_CODE (ap) != ADDR_EXPR)
670 va_list_escapes = true;
673 ap = TREE_OPERAND (ap, 0);
674 if (TREE_CODE (ap) == ARRAY_REF)
676 if (! integer_zerop (TREE_OPERAND (ap, 1)))
678 va_list_escapes = true;
681 ap = TREE_OPERAND (ap, 0);
683 if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
684 != TYPE_MAIN_VARIANT (va_list_type_node)
685 || TREE_CODE (ap) != VAR_DECL)
687 va_list_escapes = true;
691 if (is_global_var (ap))
693 va_list_escapes = true;
697 bitmap_set_bit (si.va_list_vars, DECL_UID (ap));
699 /* VA_START_BB and VA_START_AP will be only used if there is just
700 one va_start in the function. */
709 /* If there were no va_start uses in the function, there is no need to
711 if (si.va_start_count == 0)
714 /* If some va_list arguments weren't local, we can't optimize. */
718 /* For void * or char * va_list, something useful can be done only
719 if there is just one va_start. */
720 if (va_list_simple_ptr && si.va_start_count > 1)
722 va_list_escapes = true;
726 /* For struct * va_list, if the backend didn't tell us what the counter fields
727 are, there is nothing more we can do. */
728 if (!va_list_simple_ptr
729 && va_list_gpr_counter_field == NULL_TREE
730 && va_list_fpr_counter_field == NULL_TREE)
732 va_list_escapes = true;
736 /* For void * or char * va_list there is just one counter
737 (va_list itself). Use VA_LIST_GPR_SIZE for it. */
738 if (va_list_simple_ptr)
739 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
/* Dominance info is needed by reachable_at_most_once.  */
741 calculate_dominance_info (CDI_DOMINATORS);
/* Pass 2: per basic block, classify every phi/statement that touches
   a va_list as a recognized va_arg operation or an escape.  */
745 block_stmt_iterator i;
/* -1 = "not yet computed" for this bb; helpers fill it lazily.  */
747 si.compute_sizes = -1;
750 /* For va_list_simple_ptr, we have to check PHI nodes too. We treat
751 them as assignments for the purpose of escape analysis. This is
752 not needed for non-simple va_list because virtual phis don't perform
753 any real data movement. */
754 if (va_list_simple_ptr)
760 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
762 lhs = PHI_RESULT (phi);
764 if (!is_gimple_reg (lhs))
767 FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
769 rhs = USE_FROM_PTR (uop);
/* Treat each phi argument as lhs = rhs for the escape analysis.  */
770 if (va_list_ptr_read (&si, rhs, lhs))
772 else if (va_list_ptr_write (&si, lhs, rhs))
775 check_va_list_escapes (&si, lhs, rhs);
/* Any other reference to a tracked va_list in the phi is an escape.  */
777 if (si.va_list_escapes
778 || walk_tree (&phi, find_va_list_reference,
779 si.va_list_vars, NULL))
781 if (dump_file && (dump_flags & TDF_DETAILS))
783 fputs ("va_list escapes in ", dump_file);
784 print_generic_expr (dump_file, phi, dump_flags);
785 fputc ('\n', dump_file);
787 va_list_escapes = true;
/* Now the ordinary statements of this block.  */
793 for (i = bsi_start (bb);
794 !bsi_end_p (i) && !va_list_escapes;
797 tree stmt = bsi_stmt (i);
800 /* Don't look at __builtin_va_{start,end}, they are ok. */
801 call = get_call_expr_in (stmt);
804 tree callee = get_callee_fndecl (call);
807 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
808 && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
809 || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
813 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
815 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
816 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
818 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
819 rhs = TREE_OPERAND (rhs, 0);
821 if (va_list_simple_ptr)
823 /* Check for tem = ap. */
824 if (va_list_ptr_read (&si, rhs, lhs))
827 /* Check for the last insn in:
832 else if (va_list_ptr_write (&si, lhs, rhs))
836 check_va_list_escapes (&si, lhs, rhs);
840 /* Check for ap[0].field = temp. */
841 if (va_list_counter_struct_op (&si, lhs, rhs, true))
844 /* Check for temp = ap[0].field. */
845 else if (va_list_counter_struct_op (&si, rhs, lhs, false))
848 /* Do any architecture specific checking. */
849 else if (targetm.stdarg_optimize_hook
850 && targetm.stdarg_optimize_hook (&si, lhs, rhs))
855 /* All other uses of va_list are either va_copy (that is not handled
856 in this optimization), taking address of va_list variable or
857 passing va_list to other functions (in that case va_list might
858 escape the function and therefore va_start needs to set it up
859 fully), or some unexpected use of va_list. None of these should
860 happen in a gimplified VA_ARG_EXPR. */
861 if (si.va_list_escapes
862 || walk_tree (&stmt, find_va_list_reference,
863 si.va_list_vars, NULL))
865 if (dump_file && (dump_flags & TDF_DETAILS))
867 fputs ("va_list escapes in ", dump_file);
868 print_generic_expr (dump_file, stmt, dump_flags);
869 fputc ('\n', dump_file);
871 va_list_escapes = true;
/* Final check: did any tracked escape temporary leak further?  */
879 if (! va_list_escapes
880 && va_list_simple_ptr
881 && ! bitmap_empty_p (si.va_list_escape_vars)
882 && check_all_va_list_escapes (&si))
883 va_list_escapes = true;
/* On escape, fall back to saving all argument registers.  */
888 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
889 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
891 BITMAP_FREE (si.va_list_vars);
892 BITMAP_FREE (si.va_list_escape_vars);
/* Summarize the decision in the dump file.  */
896 fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
897 funcname, (int) va_list_escapes);
898 if (cfun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
899 fputs ("all", dump_file);
901 fprintf (dump_file, "%d", cfun->va_list_gpr_size);
902 fputs (" GPR units and ", dump_file);
903 if (cfun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
904 fputs ("all", dump_file);
906 fprintf (dump_file, "%d", cfun->va_list_fpr_size);
907 fputs (" FPR units.\n", dump_file);
/* Pass descriptor registering the stdarg optimization with the pass
   manager.  NOTE(review): the initializer is elided in this listing --
   the pass name, tv_id and trailing fields are not visible here.  */
913 struct tree_opt_pass pass_stdarg =
916 gate_optimize_stdarg, /* gate */
917 execute_optimize_stdarg, /* execute */
920 0, /* static_pass_number */
/* Runs on SSA-form GIMPLE with CFG and alias info available.  */
922 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
923 0, /* properties_provided */
924 0, /* properties_destroyed */
925 0, /* todo_flags_start */
926 TODO_dump_func, /* todo_flags_finish */