/* Calculate branch probabilities, and basic block execution counts.
   Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005  Free Software Foundation, Inc.
   Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
   based on some ideas from Dain Samples of UC Berkeley.
   Further mangling by Bob Manson, Cygnus Support.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* Generate basic block profile instrumentation and auxiliary files.
   Profile generation is optimized, so that not all arcs in the basic
   block graph need instrumenting.  First, the BB graph is closed with
   one entry (function start) and one exit (function exit).  Any
   ABNORMAL_EDGE cannot be instrumented (because there is no control
   path to place the code).  We close the graph by inserting fake
   EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
   edges that do not go to the exit_block; we ignore such abnormal
   edges.  Naturally these fake edges are never directly traversed,
   and so *cannot* be directly instrumented.  Some other graph
   massaging is done.  To optimize the instrumentation we generate the
   minimal spanning tree of the BB graph; only edges that are not on
   the spanning tree (plus the entry point) need instrumenting.  From
   that information all other edge counts can be deduced.  By
   construction all fake edges must be on the spanning tree.  We also
   attempt to place EDGE_CRITICAL edges on the spanning tree.

   The auxiliary files generated are <dumpbase>.gcno (at compile time)
   and <dumpbase>.gcda (at run time).  The format is
   described in full in gcov-io.h.  */

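/* A rough illustration of the scheme described above (the CFG is an
   invented example, not taken from this file): for a diamond-shaped
   body with edges ENTRY->A, A->B, A->C, B->D, C->D and D->EXIT there
   are 6 blocks and 6 edges, so a spanning tree covers 5 of the edges
   and only the single non-tree edge (say A->C) needs a counter, in
   addition to the function entry count.  Every other count follows by
   flow conservation at each block, e.g.
   count (A->B) = count (ENTRY->A) - count (A->C).  */
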
/* ??? Register allocation should use basic block execution counts to
   give preference to the most commonly executed blocks.  */

/* ??? Should calculate branch probabilities before instrumenting code, since
   then we can use arc counts to help decide which arcs to instrument.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "output.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "toplev.h"
#include "coverage.h"
#include "value-prof.h"
#include "tree.h"
#include "cfghooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "cfgloop.h"
#include "tree-pass.h"

/* Hooks for profiling.  */
static struct profile_hooks* profile_hooks;

/* File for profiling debug output.  */
static inline FILE*
profile_dump_file (void) {
  return profile_hooks->profile_dump_file ();
}

/* Additional information about the edges we need.  */
struct edge_info {
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};

struct bb_info {
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges.  */
  gcov_type succ_count;
  gcov_type pred_count;
};

#define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
#define BB_INFO(b)  ((struct bb_info *) (b)->aux)

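/* Both structures above are reached through the generic aux pointers of
   edges and basic blocks; branch_prob () sets them up with
   alloc_aux_for_edges (sizeof (struct edge_info)) and
   compute_branch_probabilities () with
   alloc_aux_for_blocks (sizeof (struct bb_info)).  */
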
/* Counter summary from the last set of coverage counts read.  */

const struct gcov_ctr_summary *profile_info;

/* Collect statistics on the performance of this pass for the entire source
   file.  */

static int total_num_blocks;
static int total_num_edges;
static int total_num_edges_ignored;
static int total_num_edges_instrumented;
static int total_num_blocks_created;
static int total_num_passes;
static int total_num_times_called;
static int total_hist_br_prob[20];
static int total_num_never_executed;
static int total_num_branches;

/* Forward declarations.  */
static void find_spanning_tree (struct edge_list *);
static unsigned instrument_edges (struct edge_list *);
static void instrument_values (histogram_values);
static void compute_branch_probabilities (void);
static void compute_value_histograms (histogram_values);
static gcov_type * get_exec_counts (void);
static basic_block find_group (basic_block);
static void union_groups (basic_block, basic_block);


/* Add edge instrumentation code to the entire insn chain.

   EL is the list of edges in the current function; only edges that are
   neither ignored nor on the spanning tree are instrumented.  Returns
   the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          struct edge_info *inf = EDGE_INFO (e);

          if (!inf->ignore && !inf->on_tree)
            {
              gcc_assert (!(e->flags & EDGE_ABNORMAL));
              if (dump_file)
                fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                         e->src->index, e->dest->index,
                         EDGE_CRITICAL_P (e) ? " (and split)" : "");
              (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
            }
        }
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}

/* Add code to measure histograms for values in list VALUES.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          t = GCOV_COUNTER_V_INTERVAL;
          break;

        case HIST_TYPE_POW2:
          t = GCOV_COUNTER_V_POW2;
          break;

        case HIST_TYPE_SINGLE_VALUE:
          t = GCOV_COUNTER_V_SINGLE;
          break;

        case HIST_TYPE_CONST_DELTA:
          t = GCOV_COUNTER_V_DELTA;
          break;

        default:
          gcc_unreachable ();
        }
      if (!coverage_counter_alloc (t, hist->n_counters))
        continue;

      switch (hist->type)
        {
        case HIST_TYPE_INTERVAL:
          (profile_hooks->gen_interval_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_POW2:
          (profile_hooks->gen_pow2_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_SINGLE_VALUE:
          (profile_hooks->gen_one_value_profiler) (hist, t, 0);
          break;

        case HIST_TYPE_CONST_DELTA:
          (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
          break;

        default:
          gcc_unreachable ();
        }
    }
  VEC_free (histogram_value, heap, values);
}


/* Computes hybrid profile for all matching entries in da_file.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf(dump_file, "Merged %u profiles with maximal count %u.\n",
            profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}


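/* Note that the iteration order and the filter used above match the ones
   in instrument_edges () and compute_branch_probabilities (), so the
   returned array is indexed by the position of an edge in that shared
   order.  */
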
/* Compute the branch probabilities for the various branches.
   Annotate them accordingly.  */

static void
compute_branch_probabilities (void)
{
  basic_block bb;
  int i;
  int num_edges = 0;
  int changes;
  int passes;
  int hist_br_prob[20];
  int num_never_executed;
  int num_branches;
  gcov_type *exec_counts = get_exec_counts ();
  int exec_counts_pos = 0;

  /* Very simple sanity checks so we catch bugs in our profiling code.  */
  if (profile_info)
    {
      if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
        {
          error ("corrupted profile info: run_max * runs < sum_max");
          exec_counts = NULL;
        }

      if (profile_info->sum_all < profile_info->sum_max)
        {
          error ("corrupted profile info: sum_all is smaller than sum_max");
          exec_counts = NULL;
        }
    }

  /* Attach extra info block to each bb.  */

  alloc_aux_for_blocks (sizeof (struct bb_info));
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->succ_count++;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!EDGE_INFO (e)->ignore)
          BB_INFO (bb)->pred_count++;
    }

  /* Avoid predicting entry on exit nodes.  */
  BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
  BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;

  /* For each edge not on the spanning tree, set its execution count from
     the .da file.  */

  /* The first count in the .da file is the number of times that the function
     was entered.  This is the exec_count for block zero.  */

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
          {
            num_edges++;
            if (exec_counts)
              {
                e->count = exec_counts[exec_counts_pos++];
                if (e->count > profile_info->sum_max)
                  {
                    error ("corrupted profile info: edge from %i to %i exceeds maximal count",
                           bb->index, e->dest->index);
                  }
              }
            else
              e->count = 0;

            EDGE_INFO (e)->count_valid = 1;
            BB_INFO (bb)->succ_count--;
            BB_INFO (e->dest)->pred_count--;
            if (dump_file)
              {
                fprintf (dump_file, "\nRead edge from %i to %i, count:",
                         bb->index, e->dest->index);
                fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
                         (HOST_WIDEST_INT) e->count);
              }
          }
    }

  if (dump_file)
    fprintf (dump_file, "\n%d edge counts read\n", num_edges);

  /* For every block in the file,
     - if every exit/entrance edge has a known count, then set the block count
     - if the block count is known, and every exit/entrance edge but one has
     a known execution count, then set the count of the remaining edge

     As edge counts are set, decrement the succ/pred count, but don't delete
     the edge, that way we can easily tell when all edges are known, or only
     one edge is unknown.  */

  /* The order that the basic blocks are iterated through is important.
     Since the code that finds spanning trees starts with block 0, low numbered
     edges are put on the spanning tree in preference to high numbered edges.
     Hence, most instrumented edges are at the end.  Graph solving works much
     faster if we propagate numbers from the end to the start.

     This takes an average of slightly more than 3 passes.  */

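  /* For instance (made-up numbers): if bb->count is already known to be
     100 and one of its two outgoing edges was instrumented with a count
     of 60, the remaining edge must have been taken 100 - 60 = 40 times,
     which in turn may make one of the neighboring blocks solvable.  */
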
  changes = 1;
  passes = 0;
  while (changes)
    {
      passes++;
      changes = 0;
      FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
        {
          struct bb_info *bi = BB_INFO (bb);
          if (! bi->count_valid)
            {
              if (bi->succ_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
              else if (bi->pred_count == 0)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;
                  bb->count = total;
                  bi->count_valid = 1;
                  changes = 1;
                }
            }
          if (bi->count_valid)
            {
              if (bi->succ_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->succs)
                    if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->succ_count--;

                  BB_INFO (e->dest)->pred_count--;
                  changes = 1;
                }
              if (bi->pred_count == 1)
                {
                  edge e;
                  edge_iterator ei;
                  gcov_type total = 0;

                  /* One of the counts will be invalid, but it is zero,
                     so adding it in also doesn't hurt.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    total += e->count;

                  /* Search for the invalid edge, and set its count.  */
                  FOR_EACH_EDGE (e, ei, bb->preds)
                    if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
                      break;

                  /* Calculate count for remaining edge by conservation.  */
                  total = bb->count - total + e->count;

                  gcc_assert (e);
                  EDGE_INFO (e)->count_valid = 1;
                  e->count = total;
                  bi->pred_count--;

                  BB_INFO (e->src)->succ_count--;
                  changes = 1;
                }
            }
        }
    }
  if (dump_file)
    dump_flow_info (dump_file);

  total_num_passes += passes;
  if (dump_file)
    fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);

  /* If the graph has been correctly solved, every block will have a
     succ and pred count of zero.  */
  FOR_EACH_BB (bb)
    {
      gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
    }

  /* For every edge, calculate its branch probability and add a reg_note
     to the branch insn to indicate this.  */

  for (i = 0; i < 20; i++)
    hist_br_prob[i] = 0;
  num_never_executed = 0;
  num_branches = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;
      rtx note;

      if (bb->count < 0)
        {
          error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
                 bb->index, (int)bb->count);
          bb->count = 0;
        }
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Function may return twice in the case the called function is
             setjmp or calls fork, but we can't represent this by extra
             edge from the entry, since extra edge from the exit is
             already present.  We get negative frequency from the entry
             point.  */
          if ((e->count < 0
               && e->dest == EXIT_BLOCK_PTR)
              || (e->count > bb->count
                  && e->dest != EXIT_BLOCK_PTR))
            {
              if (block_ends_with_call_p (bb))
                e->count = e->count < 0 ? 0 : bb->count;
            }
          if (e->count < 0 || e->count > bb->count)
            {
              error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
                     e->src->index, e->dest->index,
                     (int)e->count);
              e->count = bb->count / 2;
            }
        }
      if (bb->count)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
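          /* For example, with REG_BR_PROB_BASE at its usual value of 10000,
             a block executed 3 times whose edge was taken once gets
             (1 * 10000 + 3 / 2) / 3 == 3333, i.e. roughly a 33% probability.  */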
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            {
              int prob;
              edge e;
              int index;

              /* Find the branch edge.  It is possible that we do have fake
                 edges here.  */
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
                  break;

              prob = e->probability;
              index = prob * 20 / REG_BR_PROB_BASE;

              if (index == 20)
                index = 19;
              hist_br_prob[index]++;

              /* Do this for RTL only.  */
              if (!ir_type ())
                {
                  note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
                  /* There may already be a note put by some other pass, such
                     as the builtin_expect expander.  */
                  if (note)
                    XEXP (note, 0) = GEN_INT (prob);
                  else
                    REG_NOTES (BB_END (bb))
                      = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
                                           REG_NOTES (BB_END (bb)));
                }
              num_branches++;
            }
        }
      /* Otherwise try to preserve the existing REG_BR_PROB probabilities
         that tree-based profile guessing put into the code.  BB can be the
         ENTRY_BLOCK, and it can have multiple (fake) successors in
         EH cases, but it still has no code; don't crash in this case.  */
      else if (profile_status == PROFILE_ABSENT
               && !ir_type ()
               && EDGE_COUNT (bb->succs) > 1
               && BB_END (bb)
               && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
        {
          int prob = INTVAL (XEXP (note, 0));

          BRANCH_EDGE (bb)->probability = prob;
          FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
        }
      /* As a last resort, distribute the probabilities evenly.
         Use the simple heuristic that if there are normal edges,
         all abnormal edges get a frequency of 0; otherwise distribute
         the frequency over the abnormal edges (this is the case of
         noreturn calls).  */
      else if (profile_status == PROFILE_ABSENT)
        {
          int total = 0;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
              total++;
          if (total)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
                  e->probability = REG_BR_PROB_BASE / total;
                else
                  e->probability = 0;
            }
          else
            {
              total += EDGE_COUNT (bb->succs);
              FOR_EACH_EDGE (e, ei, bb->succs)
                e->probability = REG_BR_PROB_BASE / total;
            }
          if (bb->index >= 0
              && block_ends_with_condjump_p (bb)
              && EDGE_COUNT (bb->succs) >= 2)
            num_branches++, num_never_executed++;
        }
    }
  counts_to_freqs ();

  if (dump_file)
    {
      fprintf (dump_file, "%d branches\n", num_branches);
      fprintf (dump_file, "%d branches never executed\n",
               num_never_executed);
      if (num_branches)
        for (i = 0; i < 10; i++)
          fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                   (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
                   5 * i, 5 * i + 5);

      total_num_branches += num_branches;
      total_num_never_executed += num_never_executed;
      for (i = 0; i < 20; i++)
        total_hist_br_prob[i] += hist_br_prob[i];

      fputc ('\n', dump_file);
      fputc ('\n', dump_file);
    }

  free_aux_for_blocks ();
}

/* Load the value histograms whose descriptions are stored in the VALUES
   array from the .gcda file.  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
        {
          histogram_counts[t] = NULL;
          continue;
        }

      histogram_counts[t] =
        get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
                             n_histogram_counters[t], NULL);
      if (histogram_counts[t])
        any = 1;
      act_count[t] = histogram_counts[t];
    }
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      tree stmt = hist->hvalue.stmt;
      stmt_ann_t ann = get_stmt_ann (stmt);

      t = (int) hist->type;

      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      hist->hvalue.next = ann->histograms;
      ann->histograms = hist;
      hist->hvalue.counters =
            xmalloc (sizeof (gcov_type) * hist->n_counters);
      for (j = 0; j < hist->n_counters; j++)
        hist->hvalue.counters[j] = aact_count[j];
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}

#define BB_TO_GCOV_INDEX(bb)  ((bb)->index + 1)
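/* With the temporary index assignments made in branch_prob () below
   (ENTRY_BLOCK_PTR->index = -1, EXIT_BLOCK_PTR->index = last_basic_block),
   this maps the entry block to gcov index 0, ordinary blocks to
   1 .. last_basic_block, and the exit block to last_basic_block + 1,
   keeping every index in the output nonnegative.  */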
/* When passed NULL as FILE_NAME, initialize.
   When passed something else, output the necessary records to change
   the current file to FILE_NAME and the current line to LINE, opening
   a GCOV_TAG_LINES record for BB (tracked in *OFFSET) if none is open
   yet.  */
static void
output_location (char const *file_name, int line,
                 gcov_position_t *offset, basic_block bb)
{
  static char const *prev_file_name;
  static int prev_line;
  bool name_differs, line_differs;

  if (!file_name)
    {
      prev_file_name = NULL;
      prev_line = -1;
      return;
    }

  name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
  line_differs = prev_line != line;

  if (name_differs || line_differs)
    {
      if (!*offset)
        {
          *offset = gcov_write_tag (GCOV_TAG_LINES);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
          name_differs = line_differs = true;
        }

      /* If this is a new source file, then output the
         file's name to the .bb file.  */
      if (name_differs)
        {
          prev_file_name = file_name;
          gcov_write_unsigned (0);
          gcov_write_string (prev_file_name);
        }
      if (line_differs)
        {
          gcov_write_unsigned (line);
          prev_line = line;
        }
    }
}

/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic block, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
         Instead we simply allow negative counts on edges from exit to the
         block past call and corresponding probabilities.  We can't go
         with the extra edges because that would result in flowgraph that
         needs to have fake edges outside the spanning tree.  */

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          block_stmt_iterator bsi;
          tree last = NULL;

          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
          for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
            {
              last = bsi_stmt (bsi);
              if (EXPR_LOCUS (last))
                break;
            }

          /* Edge with goto locus might get wrong coverage info unless
             it is the only edge out of BB.
             Don't do that when the locuses match, so
             if (blah) goto something;
             is not computed twice.  */
          if (last && EXPR_LOCUS (last)
              && e->goto_locus
              && !single_succ_p (bb)
#ifdef USE_MAPPED_LOCATION
              && (LOCATION_FILE (e->goto_locus)
                  != LOCATION_FILE (EXPR_LOCATION  (last))
                  || (LOCATION_LINE (e->goto_locus)
                      != LOCATION_LINE (EXPR_LOCATION  (last)))))
#else
              && (e->goto_locus->file != EXPR_LOCUS (last)->file
                  || (e->goto_locus->line != EXPR_LOCUS (last)->line)))
#endif
            {
              basic_block new = split_edge (e);
              single_succ_edge (new)->goto_locus = e->goto_locus;
            }
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->dest != EXIT_BLOCK_PTR)
            need_exit_edge = 1;
          if (e->dest == EXIT_BLOCK_PTR)
            have_exit_edge = 1;
        }
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
               && e->src != ENTRY_BLOCK_PTR)
            need_entry_edge = 1;
          if (e->src == ENTRY_BLOCK_PTR)
            have_entry_edge = 1;
        }

      if (need_exit_edge && !have_exit_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake exit edge to bb %i\n",
                     bb->index);
          make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
        }
      if (need_entry_edge && !have_entry_edge)
        {
          if (dump_file)
            fprintf (dump_file, "Adding fake entry edge to bb %i\n",
                     bb->index);
          make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
        }
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
          && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
        {
          EDGE_INFO (e)->ignore = 1;
          ignored_edges++;
        }
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
        /*NOP*/;
      else if (e->flags & EDGE_FAKE)
        {
          inf->ignore = 1;
          ignored_edges++;
        }
      else
        num_instrumented++;
    }

  total_num_blocks += n_basic_blocks + 2;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
        gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

   /* Keep all basic block indexes nonnegative in the gcov output.
      Index 0 is used for entry block, last index is for exit block.
      */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;

  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
        {
          edge e;
          edge_iterator ei;

          offset = gcov_write_tag (GCOV_TAG_ARCS);
          gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              struct edge_info *i = EDGE_INFO (e);
              if (!i->ignore)
                {
                  unsigned flag_bits = 0;

                  if (i->on_tree)
                    flag_bits |= GCOV_ARC_ON_TREE;
                  if (e->flags & EDGE_FAKE)
                    flag_bits |= GCOV_ARC_FAKE;
                  if (e->flags & EDGE_FALLTHRU)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;
                  /* On trees we don't have fallthru flags, but we can
                     recompute them from CFG shape.  */
                  if (ir_type ()
                      && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
                      && e->src->next_bb == e->dest)
                    flag_bits |= GCOV_ARC_FALLTHROUGH;

                  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
                  gcov_write_unsigned (flag_bits);
                }
            }

          gcov_write_length (offset);
        }
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      if (!ir_type ())
        {
          gcov_position_t offset;

          FOR_EACH_BB (bb)
            {
              rtx insn = BB_HEAD (bb);
              int ignore_next_note = 0;

              offset = 0;

              /* We are looking for line number notes.  Search backward
                 before basic block to find correct ones.  */
              insn = prev_nonnote_insn (insn);
              if (!insn)
                insn = get_insns ();
              else
                insn = NEXT_INSN (insn);

              while (insn != BB_END (bb))
                {
                  if (NOTE_P (insn))
                    {
                      /* Must ignore the line number notes that
                         immediately follow the end of an inline function
                         to avoid counting it twice.  There is a note
                         before the call, and one after the call.  */
                      if (NOTE_LINE_NUMBER (insn)
                          == NOTE_INSN_REPEATED_LINE_NUMBER)
                        ignore_next_note = 1;
                      else if (NOTE_LINE_NUMBER (insn) <= 0)
                        /*NOP*/;
                      else if (ignore_next_note)
                        ignore_next_note = 0;
                      else
                        {
                          expanded_location s;
                          NOTE_EXPANDED_LOCATION (s, insn);
                          output_location (s.file, s.line, &offset, bb);
                        }
                    }
                  insn = NEXT_INSN (insn);
                }

              if (offset)
                {
                  /* A file of NULL indicates the end of run.  */
                  gcov_write_unsigned (0);
                  gcov_write_string (NULL);
                  gcov_write_length (offset);
                }
            }
        }
      else
        {
          gcov_position_t offset;

          FOR_EACH_BB (bb)
            {
              block_stmt_iterator bsi;

              offset = 0;

              if (bb == ENTRY_BLOCK_PTR->next_bb)
                {
                  expanded_location curr_location =
                    expand_location (DECL_SOURCE_LOCATION
                                     (current_function_decl));
                  output_location (curr_location.file, curr_location.line,
                                   &offset, bb);
                }

              for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
                {
                  tree stmt = bsi_stmt (bsi);
                  if (EXPR_HAS_LOCATION (stmt))
                    output_location (EXPR_FILENAME (stmt),
                                     EXPR_LINENO (stmt),
                                     &offset, bb);
                }

              /* Notice GOTO expressions we eliminated while constructing the
                 CFG.  */
              if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
                {
                  /* ??? source_locus type is marked deprecated in input.h.  */
                  source_locus curr_location = single_succ_edge (bb)->goto_locus;
                  /* ??? The FILE/LINE API is inconsistent for these cases.  */
#ifdef USE_MAPPED_LOCATION
                  output_location (LOCATION_FILE (curr_location),
                                   LOCATION_LINE (curr_location),
                                   &offset, bb);
#else
                  output_location (curr_location->file, curr_location->line,
                                   &offset, bb);
#endif
                }

              if (offset)
                {
                  /* A file of NULL indicates the end of run.  */
                  gcov_write_unsigned (0);
                  gcov_write_string (NULL);
                  gcov_write_length (offset);
                }
            }
        }
    }

  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
        compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
        instrument_values (values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
        bsi_commit_edge_inserts ();
      else
        {
          commit_edge_insertions_watch_calls ();
          allocate_reg_info (max_reg_num (), FALSE, FALSE);
        }
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
         insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file())
        dump_flow_info (profile_dump_file());
    }

  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
  coverage_end_function ();
}

/* Union find algorithm implementation for the basic blocks using
   aux fields.  */

static basic_block
find_group (basic_block bb)
{
  basic_block group = bb, bb1;

  while ((basic_block) group->aux != group)
    group = (basic_block) group->aux;

  /* Compress path.  */
  while ((basic_block) bb->aux != group)
    {
      bb1 = (basic_block) bb->aux;
      bb->aux = (void *) group;
      bb = bb1;
    }
  return group;
}

static void
union_groups (basic_block bb1, basic_block bb2)
{
  basic_block bb1g = find_group (bb1);
  basic_block bb2g = find_group (bb2);

  /* ??? I don't have a place for the rank field.  OK.  Let's go without it;
     this code is unlikely to be a performance problem anyway.  */
  gcc_assert (bb1g != bb2g);

  bb1g->aux = bb2g;
}

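/* For example, after union_groups (b1, b2) and union_groups (b2, b3)
   the representative chain is b1 -> b2 -> b3; a later find_group (b1)
   returns b3 and, by path compression, rewrites b1->aux to point at b3
   directly, so repeated lookups stay cheap.  */
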
/* This function searches all of the edges in the program flow graph, and puts
   as many bad edges as possible onto the spanning tree.  Bad edges include
   abnormal edges, which can't be instrumented at the moment.  Since it is
   possible for fake edges to form a cycle, we will have to develop some
   better way in the future.  Also put critical edges onto the tree, since
   they are more expensive to instrument.  */

static void
find_spanning_tree (struct edge_list *el)
{
  int i;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  /* We use aux field for standard union-find algorithm.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = bb;

  /* Add the fake edge from exit to entry that we can't instrument.  */
  union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);

  /* First add all abnormal edges to the tree unless they form a cycle. Also
     add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
     setting return value from function.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
           || e->dest == EXIT_BLOCK_PTR)
          && !EDGE_INFO (e)->ignore
          && (find_group (e->src) != find_group (e->dest)))
        {
          if (dump_file)
            fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* Now insert all critical edges to the tree unless they form a cycle.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Critical edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  /* And now the rest.  */
  for (i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      if (!EDGE_INFO (e)->ignore
          && find_group (e->src) != find_group (e->dest))
        {
          if (dump_file)
            fprintf (dump_file, "Normal edge %d to %d put to tree\n",
                     e->src->index, e->dest->index);
          EDGE_INFO (e)->on_tree = 1;
          union_groups (e->src, e->dest);
        }
    }

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->aux = NULL;
}

/* Perform file-level initialization for branch-prob processing.  */

void
init_branch_prob (void)
{
  int i;

  total_num_blocks = 0;
  total_num_edges = 0;
  total_num_edges_ignored = 0;
  total_num_edges_instrumented = 0;
  total_num_blocks_created = 0;
  total_num_passes = 0;
  total_num_times_called = 0;
  total_num_branches = 0;
  total_num_never_executed = 0;
  for (i = 0; i < 20; i++)
    total_hist_br_prob[i] = 0;
}

/* Performs file-level cleanup after branch-prob processing
   is completed.  */

void
end_branch_prob (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\n");
      fprintf (dump_file, "Total number of blocks: %d\n",
               total_num_blocks);
      fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
      fprintf (dump_file, "Total number of ignored edges: %d\n",
               total_num_edges_ignored);
      fprintf (dump_file, "Total number of instrumented edges: %d\n",
               total_num_edges_instrumented);
      fprintf (dump_file, "Total number of blocks created: %d\n",
               total_num_blocks_created);
      fprintf (dump_file, "Total number of graph solution passes: %d\n",
               total_num_passes);
      if (total_num_times_called != 0)
        fprintf (dump_file, "Average number of graph solution passes: %d\n",
                 (total_num_passes + (total_num_times_called  >> 1))
                 / total_num_times_called);
      fprintf (dump_file, "Total number of branches: %d\n",
               total_num_branches);
      fprintf (dump_file, "Total number of branches never executed: %d\n",
               total_num_never_executed);
      if (total_num_branches)
        {
          int i;

          for (i = 0; i < 10; i++)
            fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
                     (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
                     / total_num_branches, 5*i, 5*i+5);
        }
    }
}

/* Set up hooks to enable tree-based profiling.  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (ir_type ());
  profile_hooks = &tree_profile_hooks;
}


/* Do branch profiling and static profile estimation passes.  */
static void
rest_of_handle_branch_prob (void)
{
  struct loops loops;

  /* Discover and record the loop depth at the head of each basic
     block.  The loop infrastructure does the real job for us.  */
  flow_loops_find (&loops);

  if (dump_file)
    flow_loops_dump (&loops, dump_file, NULL, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (flag_guess_branch_prob
      && profile_status == PROFILE_ABSENT)
    estimate_probability (&loops);

  flow_loops_free (&loops);
  free_dominance_info (CDI_DOMINATORS);
}

struct tree_opt_pass pass_branch_prob =
{
  "bp",                                 /* name */
  NULL,                                 /* gate */
  rest_of_handle_branch_prob,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'b'                                   /* letter */
};