/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, tree);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];

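/* Illustration (hypothetical snippet, not from the original source) of the
   four object_size_type values: assuming a typical target with 4-byte int
   and no padding, for

     struct S { char c[4]; int i; } s;
     char *p = &s.c[1];

   __builtin_object_size (p, 0) and (p, 2) bound the bytes remaining in the
   whole object s (7 here), while (p, 1) and (p, 3) bound the bytes
   remaining in the innermost subobject s.c (3 here).  */
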
/* Bitmaps recording which object sizes have already been computed.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}

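/* For orientation (an illustration, not part of the original source): with
   a 64-bit sizetype, TYPE_MAX_VALUE (sizetype) is 2^64 - 1, so offset_limit
   becomes roughly 2^63.  Offsets below this threshold are treated as
   additions; anything at or above it is assumed to be a pointer
   subtraction wrapped into an unsigned value.  */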

/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}


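/* A worked example (illustrative only; the struct below is hypothetical):
   for

     struct T { int i; char b[10]; } t;

   and EXPR = t.b[3] with VAR = t, the COMPONENT_REF case yields the offset
   of field b (4 on a typical target with 4-byte int) and the ARRAY_REF
   case adds 3 * sizeof (char), so the function returns size_int (7).  */
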
/* Compute __builtin_object_size for PTR, which is an ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  if (REFERENCE_CLASS_P (pt_var))
    pt_var = get_base_address (pt_var);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF
      && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
          if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
            sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
          else
            sz = offset_limit;
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
           && (unsigned HOST_WIDE_INT)
                tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && (SSA_VAR_P (pt_var) || TREE_CODE (pt_var) == STRING_CST)
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size",
                                 TYPE_ATTRIBUTES (TREE_TYPE (callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p)) - 1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p))) - 1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
        fold_convert (sizetype, gimple_call_arg (call, arg1)),
        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}

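/* Illustration (hypothetical declaration, not from this file): for

     void *my_alloc (size_t nelem, size_t elsize)
       __attribute__ ((alloc_size (1, 2)));

   the attribute arguments are 1-based, hence the "- 1" above; a call
   my_alloc (10, 4) with both arguments constant lets
   __builtin_object_size (my_alloc (10, 4), 0) evaluate to 40, just as
   calloc (10, 4) would.  */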

/* If the object size is propagated from one of the function's arguments
   directly to its return value, return that argument for GIMPLE_CALL
   statement CALL.  Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}

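/* Illustration (not from the original source): because memcpy returns its
   first argument, in

     char buf[32];
     char *p = memcpy (buf, src, n);

   the object size recorded for p is simply the object size of buf, so
   __builtin_object_size (p, 0) is 32.  */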

/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}

/* Compute object_sizes for PTR, defined to VALUE, which is not
   an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}

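/* A small worked example (illustrative only): for

     char buf[16];
     char *p = &buf[4];
     char *q = p + 6;

   merge_object_sizes is reached for q with OFFSET = 6 and ORIG = p, whose
   maximum object size is 12, so q's maximum object size becomes
   12 - 6 = 6; an offset larger than ORIG's size would clamp it to 0.  */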

/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined to VALUE, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (TREE_CODE (value) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = COND_EXPR_THEN (value);
  else_ = COND_EXPR_ELSE (value);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For an allocation GIMPLE_CALL like malloc or calloc the object size is
   the size of the allocation.
   For POINTER_PLUS_EXPR where the second operand is a constant integer,
   the object size is the object size of the first operand minus the
   constant.  If the constant is bigger than the number of remaining bytes
   until the end of the object, the object size is 0, but if it is instead
   a pointer subtraction, the object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space; constants less than half of the address space are considered
   addition, while bigger constants are treated as subtraction.
   For a memcpy-like GIMPLE_CALL that always returns one of its arguments,
   the object size is the object size of that argument.
   Otherwise, the object size is the maximum of the object sizes of the
   variables that VAR might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else if (TREE_CODE (rhs) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, rhs);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else if (TREE_CODE (arg) == COND_EXPR)
              reexamine = cond_expr_object_size (osi, var, arg);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}

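/* Illustration (hypothetical snippet, not from the original source): for

     char buf[64];
     char *p = cond ? &buf[8] : &buf[16];
     size_t n = __builtin_object_size (p, 0);

   p is defined by a GIMPLE_PHI, so the maximum object size merges both
   arguments: max (64 - 8, 64 - 16) = 56, and n folds to 56; with
   object_size_type 2 the minimum, 48, would be used instead.  */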

/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type]
        = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
             now handled by gsi_replace, called from update_call_from_tree.  */

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

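/* Behavior sketch (illustrative): when the size cannot be determined, the
   fallback above folds the builtin to the documented failure values, e.g.

     char *p = get_ptr ();                      /* hypothetical external call */
     size_t a = __builtin_object_size (p, 0);   /* folds to (size_t) -1 */
     size_t b = __builtin_object_size (p, 2);   /* folds to 0 */

   matching the "maximum unknown" / "minimum unknown" conventions of the
   builtin.  */
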
struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",                              /* name */
  NULL,                                 /* gate */
  compute_object_sizes,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};