openrisc/trunk/gnu-dev/or1k-gcc/gcc/tree-object-size.c (rev 849)
/* __builtin_object_size (ptr, object_size_type) computation
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"

struct object_size_info
{
  int object_size_type;
  bitmap visited, reexamine;
  int pass;
  bool changed;
  unsigned int *depths;
  unsigned int *stack, *tos;
};

static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };

static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple);
static unsigned int compute_object_sizes (void);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);

/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.  */
static unsigned HOST_WIDE_INT *object_sizes[4];
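
/* Illustrative example (not part of the original source): given

     struct A { char buf[10]; int i; } a;
     char *p = &a.buf[4];

   the four __builtin_object_size modes would typically yield
     __builtin_object_size (p, 0) == sizeof (a) - 4   (rest of the whole object)
     __builtin_object_size (p, 1) == 6                (rest of the subobject buf)
   and, because p is known exactly here, the lower-bound modes 2 and 3
   match the upper bounds.  When the pointer cannot be analyzed, modes
   0/1 return (size_t) -1 and modes 2/3 return 0, per unknown[] above.  */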

/* Bitmaps what object sizes have been computed already.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition.  */
static unsigned HOST_WIDE_INT offset_limit;


/* Initialize OFFSET_LIMIT variable.  */
static void
init_offset_limit (void)
{
  if (host_integerp (TYPE_MAX_VALUE (sizetype), 1))
    offset_limit = tree_low_cst (TYPE_MAX_VALUE (sizetype), 1);
  else
    offset_limit = -1;
  offset_limit /= 2;
}
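
/* Illustrative note (not part of the original source): on a target where
   sizetype is 64 bits wide, TYPE_MAX_VALUE (sizetype) is 2^64 - 1, so
   offset_limit ends up as 2^63 - 1, i.e. half of the address space.
   Constant offsets at or above this limit are treated as pointer
   subtraction rather than addition (see the comment before
   collect_object_sizes_for below).  */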


/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_low_cst (DECL_FIELD_BIT_OFFSET (t), 1)
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return double_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
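
/* Illustrative example (not part of the original source): for

     struct S { int i; char buf[10]; } s;

   computing the offset of the reference s.buf[4] within s walks the
   COMPONENT_REF and ARRAY_REF cases above and returns
   offsetof (struct S, buf) + 4, i.e. the byte offset of the accessed
   element within the outer object.  A reference kind that is not
   recognized yields error_mark_node.  */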


/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      if (sz != unknown[object_size_type])
        {
          double_int dsz = double_int_sub (uhwi_to_double_int (sz),
                                           mem_ref_offset (pt_var));
          if (double_int_negative_p (dsz))
            sz = 0;
          else if (double_int_fits_in_uhwi_p (dsz))
            sz = double_int_to_uhwi (dsz);
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && host_integerp (DECL_SIZE_UNIT (pt_var), 1)
           && (unsigned HOST_WIDE_INT)
                tree_low_cst (DECL_SIZE_UNIT (pt_var), 1) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
           && (unsigned HOST_WIDE_INT)
              tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)), 1)
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (var)), 1)
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
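
/* Illustrative example (not part of the original source): the last-field
   heuristic in the COMPONENT_REF case above means that for

     struct T { int i; char c[1]; } *p;

   a subobject query such as __builtin_object_size (&p->c[0], 1) is not
   clamped to 1: because c is the last field of the record accessed
   through a pointer, it is treated like a flexible array member and the
   size of the whole pointed-to object is used instead.  */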


/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
   Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
   argument from __builtin_object_size.  If unknown, return
   unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
{
  tree callee, bytes = NULL_TREE;
  tree alloc_size;
  int arg1 = -1, arg2 = -1;

  gcc_assert (is_gimple_call (call));

  callee = gimple_call_fndecl (call);
  if (!callee)
    return unknown[object_size_type];

  alloc_size = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (TREE_TYPE(callee)));
  if (alloc_size && TREE_VALUE (alloc_size))
    {
      tree p = TREE_VALUE (alloc_size);

      arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
      if (TREE_CHAIN (p))
        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
    }

  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_CALLOC:
        arg2 = 1;
        /* fall through */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
        arg1 = 0;
      default:
        break;
      }

  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
      || (arg2 >= 0
          && (arg2 >= (int)gimple_call_num_args (call)
              || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
    return unknown[object_size_type];

  if (arg2 >= 0)
    bytes = size_binop (MULT_EXPR,
        fold_convert (sizetype, gimple_call_arg (call, arg1)),
        fold_convert (sizetype, gimple_call_arg (call, arg2)));
  else if (arg1 >= 0)
    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));

  if (bytes && host_integerp (bytes, 1))
    return tree_low_cst (bytes, 1);

  return unknown[object_size_type];
}
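
/* Illustrative example (not part of the original source): the alloc_size
   attribute handled above lets user allocators participate, e.g.

     void *my_alloc (size_t n) __attribute__ ((alloc_size (1)));
     void *my_calloc (size_t n, size_t sz) __attribute__ ((alloc_size (1, 2)));

   so for p = my_alloc (32) the object size of p is 32, and for
   q = my_calloc (4, 16) it is 4 * 16 = 64, just as in the built-in
   malloc and calloc cases; non-constant size arguments yield
   unknown[object_size_type].  */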


/* If object size is propagated from one of function's arguments directly
   to its return value, return that argument for GIMPLE_CALL statement CALL.
   Otherwise return NULL.  */

static tree
pass_through_call (const_gimple call)
{
  tree callee = gimple_call_fndecl (call);

  if (callee
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_ASSUME_ALIGNED:
        if (gimple_call_num_args (call) >= 1)
          return gimple_call_arg (call, 0);
        break;
      default:
        break;
      }

  return NULL_TREE;
}
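
/* Illustrative example (not part of the original source): because memcpy
   returns its first argument unchanged, in

     char buf[16];
     char *q = memcpy (buf, src, 8);

   the call is treated as a pass-through above, so the object size of q
   is the object size of buf (16) rather than something derived from the
   call itself.  */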


/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && object_sizes[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
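
/* Illustrative note (not part of the original source): this is the entry
   point used when object-size checks are folded.  For example, with
   -D_FORTIFY_SOURCE glibc expands

     char buf[10];
     memcpy (buf, src, n);

   into __builtin___memcpy_chk (buf, src, n, __builtin_object_size (buf, 0)),
   and folding that __builtin_object_size call ultimately queries
   compute_builtin_object_size, here returning 10.  */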

/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */

static void
expr_object_size (struct object_size_info *osi, tree ptr, tree value)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  if (TREE_CODE (value) == WITH_SIZE_EXPR)
    value = TREE_OPERAND (value, 0);

  /* Pointer variables should have been handled by merge_object_sizes.  */
  gcc_assert (TREE_CODE (value) != SSA_NAME
              || !POINTER_TYPE_P (TREE_TYPE (value)));

  if (TREE_CODE (value) == ADDR_EXPR)
    bytes = addr_object_size (osi, value, object_size_type);
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to the result of a call.  */

static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (is_gimple_call (call));

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = alloc_object_size (call, object_size_type);

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Compute object_sizes for PTR, defined to an unknown value.  */

static void
unknown_object_size (struct object_size_info *osi, tree ptr)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (ptr);
  unsigned HOST_WIDE_INT bytes;

  gcc_assert (object_sizes[object_size_type][varno]
              != unknown[object_size_type]);
  gcc_assert (osi->pass == 0);

  bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
}


/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
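
/* Illustrative example (not part of the original source): for

     char buf[16];
     char *p = &buf[0];
     char *q = p + 4;

   merge_object_sizes propagates size 16 from p with offset 4, so the
   object size of q becomes 12.  An offset larger than the remaining
   size clamps the result to 0, and an offset of at least offset_limit
   makes the size unknown, as it is assumed to be a subtraction.  */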


/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination  later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! host_integerp (op1, 1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_low_cst (op1, 1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);

          /* op0 will be ADDR_EXPR here.  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}


/* Compute object_sizes for VAR, defined at STMT, which is
   a COND_EXPR.  Return true if the object size might need reexamination
   later.  */

static bool
cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
{
  tree then_, else_;
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  bool reexamine = false;

  gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);

  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  then_ = gimple_assign_rhs2 (stmt);
  else_ = gimple_assign_rhs3 (stmt);

  if (TREE_CODE (then_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, then_, 0);
  else
    expr_object_size (osi, var, then_);

  if (TREE_CODE (else_) == SSA_NAME)
    reexamine |= merge_object_sizes (osi, var, else_, 0);
  else
    expr_object_size (osi, var, else_);

  return reexamine;
}
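
/* Illustrative example (not part of the original source): for

     char small[8], big[32];
     char *p = cond ? small : big;

   whether the selection reaches the COND_EXPR case above or the
   GIMPLE_PHI case in collect_object_sizes_for, both arms are merged the
   same way, so the maximum object size of p (types 0/1) is 32 and the
   minimum object size (types 2/3) is 8.  */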

/* Compute object sizes for VAR.
   For ADDR_EXPR an object size is the number of remaining bytes
   to the end of the object (where what is considered an object depends on
   OSI->object_size_type).
   For allocation GIMPLE_CALL like malloc or calloc object size is the size
   of the allocation.
   For POINTER_PLUS_EXPR where second operand is a constant integer,
   object size is object size of the first operand minus the constant.
   If the constant is bigger than the number of remaining bytes until the
   end of the object, object size is 0, but if it is instead a pointer
   subtraction, object size is unknown[object_size_type].
   To differentiate addition from subtraction, ADDR_EXPR returns
   unknown[object_size_type] for all objects bigger than half of the address
   space, and constants less than half of the address space are considered
   addition, while bigger constants subtraction.
   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
   object size is object size of that argument.
   Otherwise, object size is the maximum of object sizes of variables
   that it might be set to.  */

static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple stmt;
  bool reexamine;

  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
        {
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }
      else
        {
          /* Found a dependency loop.  Mark the variable for later
             re-examination.  */
          bitmap_set_bit (osi->reexamine, varno);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Found a dependency loop at ");
              print_generic_expr (dump_file, var, dump_flags);
              fprintf (dump_file, "\n");
            }
          return;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree rhs = gimple_assign_rhs1 (stmt);
        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
            || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
          reexamine = plus_stmt_object_size (osi, var, stmt);
        else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
          reexamine = cond_expr_object_size (osi, var, stmt);
        else if (gimple_assign_single_p (stmt)
                 || gimple_assign_unary_nop_p (stmt))
          {
            if (TREE_CODE (rhs) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (rhs)))
              reexamine = merge_object_sizes (osi, var, rhs, 0);
            else
              expr_object_size (osi, var, rhs);
          }
        else
          unknown_object_size (osi, var);
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME
                && POINTER_TYPE_P (TREE_TYPE (arg)))
              reexamine = merge_object_sizes (osi, var, arg, 0);
            else
              expr_object_size (osi, var, arg);
          }
        else
          call_object_size (osi, var, stmt);
        break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      {
        tree decl = SSA_NAME_VAR (var);

        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
          expr_object_size (osi, var, decl);
      }
      break;

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
              break;

            if (TREE_CODE (rhs) == SSA_NAME)
              reexamine |= merge_object_sizes (osi, var, rhs, 0);
            else if (osi->pass == 0)
              expr_object_size (osi, var, rhs);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Need to reexamine ");
          print_generic_expr (dump_file, var, dump_flags);
          fprintf (dump_file, "\n");
        }
    }
}
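
/* Illustrative example (not part of the original source): the
   addition-vs-subtraction heuristic described in the comment before
   collect_object_sizes_for means that in

     q = p - 16;

   which is gimplified as q = p + (sizetype) -16, the constant is larger
   than half of the address space (offset_limit), so it is treated as a
   subtraction and the object size of q becomes unknown[object_size_type]
   rather than being clamped to 0.  */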


/* Helper function for check_for_plus_in_loops.  Called recursively
   to detect loops.  */

static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      if (osi->depths[varno] != depth)
        {
          unsigned int *sp;

          /* Found a loop involving pointer addition.  */
          for (sp = osi->tos; sp > osi->stack; )
            {
              --sp;
              bitmap_clear_bit (osi->reexamine, *sp);
              bitmap_set_bit (computed[osi->object_size_type], *sp);
              object_sizes[osi->object_size_type][*sp] = 0;
              if (*sp == varno)
                break;
            }
        }
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    return;

  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
        if ((gimple_assign_single_p (stmt)
             || gimple_assign_unary_nop_p (stmt))
            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
          {
            tree rhs = gimple_assign_rhs1 (stmt);

            check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
          {
            tree basevar = gimple_assign_rhs1 (stmt);
            tree cst = gimple_assign_rhs2 (stmt);

            gcc_assert (TREE_CODE (cst) == INTEGER_CST);

            check_for_plus_in_loops_1 (osi, basevar,
                                       depth + !integer_zerop (cst));
          }
        else
          gcc_unreachable ();
        break;
      }

    case GIMPLE_CALL:
      {
        tree arg = pass_through_call (stmt);
        if (arg)
          {
            if (TREE_CODE (arg) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, arg, depth);
            else
              gcc_unreachable ();
          }
        break;
      }

    case GIMPLE_PHI:
      {
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
            tree rhs = gimple_phi_arg (stmt, i)->def;

            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  osi->depths[varno] = 0;
  osi->tos--;
}


/* Check if some pointer we are computing object size of is being increased
   within a loop.  If yes, assume all the SSA variables participating in
   that loop have minimum object sizes 0.  */

static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
     and looked for a POINTER_PLUS_EXPR in the pass-through
     argument, if any.  In GIMPLE, however, such an expression
     is not a valid call operand.  */

  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      tree basevar = gimple_assign_rhs1 (stmt);
      tree cst = gimple_assign_rhs2 (stmt);

      gcc_assert (TREE_CODE (cst) == INTEGER_CST);

      if (integer_zerop (cst))
        return;

      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
      *osi->tos++ = SSA_NAME_VERSION (basevar);
      check_for_plus_in_loops_1 (osi, var, 2);
      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
      osi->tos--;
    }
}


/* Initialize data structures for the object size computation.  */

void
init_object_sizes (void)
{
  int object_size_type;

  if (object_sizes[0])
    return;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      object_sizes[object_size_type] = XNEWVEC (unsigned HOST_WIDE_INT, num_ssa_names);
      computed[object_size_type] = BITMAP_ALLOC (NULL);
    }

  init_offset_limit ();
}


/* Destroy data structures after the object size computation.  */

void
fini_object_sizes (void)
{
  int object_size_type;

  for (object_size_type = 0; object_size_type <= 3; object_size_type++)
    {
      free (object_sizes[object_size_type]);
      BITMAP_FREE (computed[object_size_type]);
      object_sizes[object_size_type] = NULL;
    }
}


/* Simple pass to optimize all __builtin_object_size () builtins.  */

static unsigned int
compute_object_sizes (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree callee, result;
          gimple call = gsi_stmt (i);

          if (gimple_code (call) != GIMPLE_CALL)
            continue;

          callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;

          init_object_sizes ();
          result = fold_call_stmt (call, false);
          if (!result)
            {
              if (gimple_call_num_args (call) == 2
                  && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
                  tree ost = gimple_call_arg (call, 1);

                  if (host_integerp (ost, 1))
                    {
                      unsigned HOST_WIDE_INT object_size_type
                        = tree_low_cst (ost, 1);

                      if (object_size_type < 2)
                        result = fold_convert (size_type_node,
                                               integer_minus_one_node);
                      else if (object_size_type < 4)
                        result = build_zero_cst (size_type_node);
                    }
                }

              if (!result)
                continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, call, 0, dump_flags);
            }

          if (!update_call_from_tree (&i, result))
            gcc_unreachable ();

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, gsi_stmt (i), 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
    }

  fini_object_sizes ();
  return 0;
}

struct gimple_opt_pass pass_object_sizes =
{
 {
  GIMPLE_PASS,
  "objsz",                              /* name */
  NULL,                                 /* gate */
  compute_object_sizes,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};
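
/* Illustrative note (not part of the original source): when the objsz pass
   runs over

     char buf[10];
     size_t n = __builtin_object_size (&buf[2], 0);

   the builtin call is folded to the constant 8, while calls whose result
   cannot be determined are folded to (size_t) -1 for types 0 and 1 and
   to 0 for types 2 and 3, as set up in compute_object_sizes above.  */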
