1 /* __builtin_object_size (ptr, object_size_type) computation
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jakub Jelinek <jakub@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "diagnostic-core.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-ssa-propagate.h"
/* Per-invocation bookkeeping shared by the object-size propagation
   routines below.  NOTE(review): this listing elides several fields
   of the struct (the excerpt skips original lines); only the fields
   visible here are documented.  */
struct object_size_info
  /* VISITED: SSA name versions already walked by
     collect_object_sizes_for.  REEXAMINE: versions found inside
     dependency cycles that must be recomputed in a later pass.  */
  bitmap visited, reexamine;
  /* Explicit DFS stack used by check_for_plus_in_loops_1; TOS points
     one past the last pushed entry in STACK.  */
  unsigned int *stack, *tos;
/* Sentinel "size not known" values, indexed by object_size_type:
   -1 (all ones) for the maximum modes 0/1, 0 for the minimum modes
   2/3.  */
static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

/* Forward declarations for the mutually recursive helpers below.
   NOTE(review): some continuation lines of these prototypes are
   elided in this excerpt.  */
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
				unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,

/* Cached results, one array per object_size_type, indexed by SSA name
   version:
   object_sizes[0] is upper bound for number of bytes till the end of
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

/* Bitmaps of SSA name versions whose object sizes have already been
   computed, one per object_size_type.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition; larger constants
   are treated as pointer subtraction.  */
static unsigned HOST_WIDE_INT offset_limit;
/* Initialize OFFSET_LIMIT variable from the maximum value of sizetype.
   NOTE(review): the excerpt elides the return-type line and the
   fallback arm for when TYPE_MAX_VALUE does not fit a HOST_WIDE_INT.  */
init_offset_limit (void)
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
/* Compute offset of EXPR within VAR.  Return error_mark_node
   when the offset cannot be determined.  NOTE(review): this excerpt
   elides the case labels and several brace/else lines; the arms are
   identified below from the operations performed.  */
compute_object_offset (const_tree expr, const_tree var)
  enum tree_code code = PLUS_EXPR;

    /* EXPR == VAR: offset is zero.  */
    return size_zero_node;

  switch (TREE_CODE (expr))
      /* Field access arm (uses DECL_FIELD_OFFSET/DECL_FIELD_BIT_OFFSET,
	 so EXPR is presumably a COMPONENT_REF — elided label).  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
      t = TREE_OPERAND (expr, 1);
      /* Byte offset of the field = DECL_FIELD_OFFSET plus the bit
	 offset converted to bytes.  */
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
			size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)

    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* No offset contribution; recurse through the wrapper.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

      /* Arm adding TYPE_SIZE_UNIT of EXPR's type (presumably
	 IMAGPART_EXPR — elided label).  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));

      /* Array indexing arm: offset = index * element size.  Negative
	 constant indices are negated and subtracted (CODE flipped in an
	 elided line).  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);

      /* MEM_REF of an ADDR_EXPR: the constant offset is embedded in
	 the MEM_REF itself.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

      /* Anything else: offset unknown.  */
      return error_mark_node;

  return size_binop (code, base, off);
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].
   NOTE(review): several brace/else/condition lines are elided in this
   excerpt; comments describe only the visible logic.  */
static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
		  int object_size_type)
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip component references to find the base object.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

      /* Base is a MEM_REF: derive size of the pointed-to object from
	 the SSA pointer it dereferences, minus the MEM_REF offset.  */
      && TREE_CODE (pt_var) == MEM_REF)
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
	  || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
	  /* No propagation context (or subobject mode): fall back to a
	     fresh whole-object computation.  */
	  sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
					    object_size_type & ~1);
	  tree var = TREE_OPERAND (pt_var, 0);
	    collect_object_sizes_for (osi, var);
	  if (bitmap_bit_p (computed[object_size_type],
			    SSA_NAME_VERSION (var)))
	    sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
	    sz = unknown[object_size_type];
      if (sz != unknown[object_size_type])
	  /* Subtract the (signed) MEM_REF offset using double_int
	     arithmetic; a negative or non-HWI result means unknown.  */
	  double_int dsz = double_int_sub (uhwi_to_double_int (sz),
					   mem_ref_offset (pt_var));
	  if (double_int_negative_p (dsz))
	  else if (double_int_fits_in_uhwi_p (dsz))
	    sz = double_int_to_uhwi (dsz);
	    sz = unknown[object_size_type];

      if (sz != unknown[object_size_type] && sz < offset_limit)
	pt_var_size = size_int (sz);

      /* Base is a DECL with a known, reasonably small size.  */
      && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
      && (unsigned HOST_WIDE_INT)
	 tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
      /* Base is a string literal with a known type size.  */
      && TREE_CODE (pt_var) == STRING_CST
      && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
      && (unsigned HOST_WIDE_INT)
	 tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
    /* Unrecognized base kind.  */
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
      /* PTR addresses a component inside PT_VAR.  For subobject sizes
	 (object_size_type & 1) find the innermost enclosing array or
	 field whose address was taken.  */
      if (object_size_type & 1)
	  var = TREE_OPERAND (ptr, 0);

	      && TREE_CODE (var) != BIT_FIELD_REF
	      && TREE_CODE (var) != COMPONENT_REF
	      && TREE_CODE (var) != ARRAY_REF
	      && TREE_CODE (var) != ARRAY_RANGE_REF
	      && TREE_CODE (var) != REALPART_EXPR
	      && TREE_CODE (var) != IMAGPART_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
	    var = TREE_OPERAND (var, 0);
	  if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
	      || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
	      && tree_int_cst_lt (pt_var_size,
				  TYPE_SIZE_UNIT (TREE_TYPE (var)))))
	  else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
	      /* For &X->fld, compute object size only if fld isn't the last
		 field, as struct { int i; char c[1]; } is often used instead
		 of flexible array member.  */
	      while (v && v != pt_var)
		switch (TREE_CODE (v))
		    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
			&& TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
			  = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
			/* Index provably below the array bound: not the
			   trailing-array idiom, keep walking up.  */
			&& TYPE_MAX_VALUE (domain)
			&& TREE_CODE (TYPE_MAX_VALUE (domain))
			&& tree_int_cst_lt (TREE_OPERAND (v, 1),
					    TYPE_MAX_VALUE (domain)))
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
		    /* Skip COMPONENT_REFs of non-record/union types.  */
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			v = TREE_OPERAND (v, 0);
		    if (TREE_CODE (v) == COMPONENT_REF
			&& TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			/* Check whether the field has a later FIELD_DECL
			   sibling, i.e. is not the last field.  */
			tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
			for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
			  if (TREE_CODE (fld_chain) == FIELD_DECL)
			v = TREE_OPERAND (v, 0);
		    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
		      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			  && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
			v = TREE_OPERAND (v, 0);

	/* Subobject size is the size of VAR's type.  */
	var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
	return unknown[object_size_type];
	var_size = pt_var_size;
      /* Remaining bytes = VAR_SIZE minus offset of PTR within VAR,
	 clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
	  if (TREE_CODE (bytes) == INTEGER_CST
	      && tree_int_cst_lt (var_size, bytes))
	    bytes = size_zero_node;
	    bytes = size_binop (MINUS_EXPR, var_size, bytes);
	  && TREE_CODE (pt_var) == MEM_REF
	  && bytes != error_mark_node)
	  /* Also bound by the remaining bytes within the whole
	     MEM_REF object; take the minimum of the two.  */
	  tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
	  if (bytes2 != error_mark_node)
	      if (TREE_CODE (bytes2) == INTEGER_CST
		  && tree_int_cst_lt (pt_var_size, bytes2))
		bytes2 = size_zero_node;
		bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
	      bytes = size_binop (MIN_EXPR, bytes, bytes2);
  else if (!pt_var_size)
    return unknown[object_size_type];

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].
   NOTE(review): case bodies assigning ARG1/ARG2 for the builtins are
   elided in this excerpt.  */
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
  tree callee, bytes = NULL_TREE;
  /* 0-based indices of the size argument(s); -1 = not applicable.  */
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
    /* Indirect call: no way to know the allocation size.  */
    return unknown[object_size_type];

  /* Honor the alloc_size attribute; its values are 1-based argument
     positions, hence the -1 adjustments below.  */
  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
      tree p = TREE_VALUE (alloc_size);
      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
	arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;

  /* Built-in allocators have well-known size arguments.  */
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      case BUILT_IN_CALLOC:
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:

  /* Bail out unless the size argument(s) are in-range constants.  */
  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
	  && (arg2 >= (int)gimple_call_num_args (call)
	      || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

    /* Two-argument (calloc-style) form: nmemb * size.  */
    bytes = size_binop (MULT_EXPR,
	fold_convert (sizetype, gimple_call_arg (call, arg1)),
	fold_convert (sizetype, gimple_call_arg (call, arg2)));
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  All the listed builtins return their first
   (destination) argument.  */
pass_through_call (const_gimple call)
  tree callee = gimple_call_fndecl (call);

      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
	if (gimple_call_num_args (call) >= 1)
	  return gimple_call_arg (call, 0);
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  Public entry point of
   this file.  NOTE(review): several brace and declaration lines are
   elided in this excerpt.  */
unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

    init_offset_limit ();

  /* Trivial case: taking the address of a known object.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
	  struct object_size_info osi;

	      fprintf (dump_file, "Computing %s %sobject size for ",
		       (object_size_type & 2) ? "minimum" : "maximum",
		       (object_size_type & 1) ? "sub" : "");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, ":\n");

	  osi.visited = BITMAP_ALLOC (NULL);
	  osi.reexamine = BITMAP_ALLOC (NULL);
	  osi.object_size_type = object_size_type;

	  /* First pass: walk UD chains, compute object sizes that
	     can be computed.  osi.reexamine bitmap at the end will
	     contain what variables were found in dependency cycles
	     and therefore need to be reexamined.  */
	    collect_object_sizes_for (&osi, ptr);

	  /* Second pass: keep recomputing object sizes of variables
	     that need reexamination, until no object sizes are
	     increased or all object sizes are computed.  */
	  if (! bitmap_empty_p (osi.reexamine))
	      bitmap reexamine = BITMAP_ALLOC (NULL);

	      /* If looking for minimum instead of maximum object size,
		 detect cases where a pointer is increased in a loop.
		 Although even without this detection pass 2 would eventually
		 terminate, it could take a long time.  If a pointer is
		 increasing this way, we need to assume 0 object size.
		 E.g. p = &buf[0]; while (cond) p = p + 4;  */
	      if (object_size_type & 2)
		  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
		  osi.stack = XNEWVEC (unsigned int, num_ssa_names);

		  /* collect_object_sizes_for is changing
		     osi.reexamine bitmap, so iterate over a copy.  */
		  bitmap_copy (reexamine, osi.reexamine);
		  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		    if (bitmap_bit_p (osi.reexamine, i))
		      check_for_plus_in_loops (&osi, ssa_name (i));

	      /* collect_object_sizes_for is changing
		 osi.reexamine bitmap, so iterate over a copy.  */
	      bitmap_copy (reexamine, osi.reexamine);
	      EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
		if (bitmap_bit_p (osi.reexamine, i))
		    collect_object_sizes_for (&osi, ssa_name (i));
		    if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "Reexamining ");
			print_generic_expr (dump_file, ssa_name (i),
			fprintf (dump_file, "\n");

	      BITMAP_FREE (reexamine);

	  /* Whatever is still marked for reexamination is now final.  */
	  EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
	    bitmap_set_bit (computed[object_size_type], i);

	  /* Debugging dumps.  */
	      EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
		if (object_sizes[object_size_type][i]
		    != unknown[object_size_type])
		    print_generic_expr (dump_file, ssa_name (i),
			     ": %s %sobject size "
			     HOST_WIDE_INT_PRINT_UNSIGNED "\n",
			     (object_size_type & 2) ? "minimum" : "maximum",
			     (object_size_type & 1) ? "sub" : "",
			     object_sizes[object_size_type][i]);

	  BITMAP_FREE (osi.reexamine);
	  BITMAP_FREE (osi.visited);

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];

  return unknown[object_size_type];
/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.
   Only called in pass 0; merges BYTES into the running max (modes 0/1)
   or min (modes 2/3) for PTR's slot.  */
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
	      || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
      /* Maximum mode: keep the larger value.  */
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
      /* Minimum mode: keep the smaller value.  */
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
/* Compute object_sizes for PTR, defined to the result of a call.
   Delegates the size computation to alloc_object_size and merges the
   result like expr_object_size does.  */
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
      /* Maximum mode: keep the larger value.  */
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
      /* Minimum mode: keep the smaller value.  */
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
/* Compute object_sizes for PTR, defined to an unknown value.  Merges
   unknown[object_size_type] (max sentinel for modes 0/1, zero for
   modes 2/3) into PTR's slot.  */
unknown_object_size (struct object_size_info *osi, tree ptr)
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
	      != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
      /* Maximum mode: keep the larger value.  */
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
      /* Minimum mode: keep the smaller value.  */
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later (i.e. ORIG itself is
   still marked for reexamination).  NOTE(review): some brace/early
   return lines are elided in this excerpt.  */
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
		    unsigned HOST_WIDE_INT offset)
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])

  /* Offsets past offset_limit are treated as pointer subtraction:
     result unknown.  */
  if (offset >= offset_limit)
      object_sizes[object_size_type][varno] = unknown[object_size_type];

    collect_object_sizes_for (osi, orig);

  /* Subtract OFFSET from ORIG's size, clamping at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
		 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
      /* Maximum mode: keep the larger value.  */
      if (object_sizes[object_size_type][varno] < orig_bytes)
	  object_sizes[object_size_type][varno] = orig_bytes;
      /* Minimum mode: keep the smaller value.  */
      if (object_sizes[object_size_type][varno] > orig_bytes)
	  object_sizes[object_size_type][varno] = orig_bytes;

  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  Also handles VAR = &MEM_REF[ptr + off],
   which is equivalent.  NOTE(review): some brace/return lines are
   elided in this excerpt.  */
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
      /* &MEM_REF[base + offset]: base/offset come from the MEM_REF.  */
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
	  || TREE_CODE (op0) == ADDR_EXPR))
      if (! host_integerp (op1, 1))
	bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
	/* Delegate SSA_NAME + constant to merge_object_sizes.  */
	return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
	  unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

	  /* op0 will be ADDR_EXPR here.  */
	  bytes = addr_object_size (osi, op0, object_size_type);
	  if (bytes == unknown[object_size_type])
	  else if (off > offset_limit)
	    bytes = unknown[object_size_type];
	  else if (off > bytes)
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
      /* Maximum mode: keep the larger value.  */
      if (object_sizes[object_size_type][varno] < bytes)
	object_sizes[object_size_type][varno] = bytes;
      /* Minimum mode: keep the smaller value.  */
      if (object_sizes[object_size_type][varno] > bytes)
	object_sizes[object_size_type][varno] = bytes;
831 /* Compute object_sizes for VAR, defined to VALUE, which is
832 a COND_EXPR. Return true if the object size might need reexamination
836 cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
839 int object_size_type = osi->object_size_type;
840 unsigned int varno = SSA_NAME_VERSION (var);
841 bool reexamine = false;
843 gcc_assert (TREE_CODE (value) == COND_EXPR);
845 if (object_sizes[object_size_type][varno] == unknown[object_size_type])
848 then_ = COND_EXPR_THEN (value);
849 else_ = COND_EXPR_ELSE (value);
851 if (TREE_CODE (then_) == SSA_NAME)
852 reexamine |= merge_object_sizes (osi, var, then_, 0);
854 expr_object_size (osi, var, then_);
856 if (TREE_CODE (else_) == SSA_NAME)
857 reexamine |= merge_object_sizes (osi, var, else_, 0);
859 expr_object_size (osi, var, else_);
/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.
   NOTE(review): case labels and several brace lines are elided in this
   excerpt.  */
collect_object_sizes_for (struct object_size_info *osi, tree var)
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);

  /* Already final: nothing to do.  */
  if (bitmap_bit_p (computed[object_size_type], varno))

  if (bitmap_set_bit (osi->visited, varno))
      /* First visit: seed with the identity of the max/min merge
	 (-1 for minimum modes, 0 for maximum modes).  */
      object_sizes[object_size_type][varno]
	= (object_size_type & 2) ? -1 : 0;
      /* Found a dependency loop.  Mark the variable for later
	 reexamination.  */
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Found a dependency loop at ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");

  stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (stmt))
      /* GIMPLE_ASSIGN (label elided): dispatch on the RHS form.  */
	tree rhs = gimple_assign_rhs1 (stmt);
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else if (TREE_CODE (rhs) == COND_EXPR)
	      reexamine = cond_expr_object_size (osi, var, rhs);
	      expr_object_size (osi, var, rhs);
	  unknown_object_size (osi, var);

      /* GIMPLE_CALL (label elided): pass-through builtins propagate an
	 argument's size; otherwise treat as an allocation call.  */
	tree arg = pass_through_call (stmt);
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else if (TREE_CODE (arg) == COND_EXPR)
	      reexamine = cond_expr_object_size (osi, var, arg);
	      expr_object_size (osi, var, arg);
	  call_object_size (osi, var, stmt);

	/* Pointers defined by __asm__ statements can point anywhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];

      /* GIMPLE_NOP (label elided): default definition, examine the
	 underlying decl.  */
	  tree decl = SSA_NAME_VAR (var);

	  if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
	    expr_object_size (osi, var, DECL_INITIAL (decl));
	    expr_object_size (osi, var, decl);

      /* GIMPLE_PHI (label elided): merge over all incoming arguments.  */
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  tree rhs = gimple_phi_arg (stmt, i)->def;

	  if (object_sizes[object_size_type][varno]
	      == unknown[object_size_type])

	  if (TREE_CODE (rhs) == SSA_NAME)
	    reexamine |= merge_object_sizes (osi, var, rhs, 0);
	  else if (osi->pass == 0)
	    expr_object_size (osi, var, rhs);

  /* Finalize or queue for reexamination.  */
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
/* Helper function for check_for_plus_in_loops.  Called recursively
   with the current DEPTH; DEPTH increases across a nonzero pointer
   addition, so revisiting a name at a different depth means the pointer
   grows around a loop.  NOTE(review): case labels and some brace lines
   are elided in this excerpt.  */
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
      if (osi->depths[varno] != depth)
	  /* Found a loop involving pointer addition.  Force every name
	     on the cycle to object size 0 and mark it computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
  else if (! bitmap_bit_p (osi->reexamine, varno))

  /* Push VAR onto the DFS stack.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
      /* GIMPLE_ASSIGN (label elided).  */
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* A nonzero addition deepens the recursion by one.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));

      /* GIMPLE_CALL (label elided): follow pass-through arguments.  */
	tree arg = pass_through_call (stmt);
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);

      /* GIMPLE_PHI (label elided): follow all SSA arguments.  */
      for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  tree rhs = gimple_phi_arg (stmt, i)->def;

	  if (TREE_CODE (rhs) == SSA_NAME)
	    check_for_plus_in_loops_1 (osi, rhs, depth);

  /* Pop VAR off the DFS stack.  */
  osi->depths[varno] = 0;
/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */
check_for_plus_in_loops (struct object_size_info *osi, tree var)
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      /* Zero additions cannot grow the pointer; skip them.  */
      if (integer_zerop (cst))

      /* Seed the DFS with BASEVAR at depth 1, then walk from VAR at
	 depth 2 (one nonzero addition already seen).  */
      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
/* Initialize data structures for the object size computation.
   Idempotent: returns early if the arrays are already allocated.  */
init_object_sizes (void)
  int object_size_type;

  if (object_sizes[0])

  /* One size array and one computed-bitmap per object_size_type.  */
  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);

  init_offset_limit ();
/* Destroy data structures after the object size computation.  Frees the
   per-type size arrays and bitmaps and resets the pointers so that
   init_object_sizes can reallocate them later.  */
fini_object_sizes (void)
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
/* Simple pass to optimize all __builtin_object_size () builtins.
   Pass entry point: folds each __builtin_object_size call to a
   constant where possible.  NOTE(review): the basic-block iteration
   header and some brace lines are elided in this excerpt.  */
compute_object_sizes (void)
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	  tree callee, result;
	  gimple call = gsi_stmt (i);

	  /* Only interested in __builtin_object_size calls.  */
	  if (gimple_code (call) != GIMPLE_CALL)

	  callee = gimple_call_fndecl (call);
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)

	  init_object_sizes ();
	  result = fold_call_stmt (call, false);
	      /* Folding failed: substitute the conservative defaults
		 ((size_t) -1 for types 0/1, 0 for types 2/3).  */
	      if (gimple_call_num_args (call) == 2
		  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
		  tree ost = gimple_call_arg (call, 1);

		  if (host_integerp (ost, 1))
		      unsigned HOST_WIDE_INT object_size_type
			= tree_low_cst (ost, 1);

		      if (object_size_type < 2)
			result = fold_convert (size_type_node,
					       integer_minus_one_node);
		      else if (object_size_type < 4)
			result = build_zero_cst (size_type_node);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);

	  if (!update_call_from_tree (&i, result))

	  /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
	     now handled by gsi_replace, called from update_call_from_tree.  */

	  if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, "\n");

  fini_object_sizes ();
1272 struct gimple_opt_pass pass_object_sizes =
1278 compute_object_sizes, /* execute */
1281 0, /* static_pass_number */
1282 TV_NONE, /* tv_id */
1283 PROP_cfg | PROP_ssa, /* properties_required */
1284 0, /* properties_provided */
1285 0, /* properties_destroyed */
1286 0, /* todo_flags_start */
1287 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */