OpenCores
URL https://opencores.org/ocsvn/openrisc/openrisc/trunk

Subversion Repositories openrisc

[/] [openrisc/] [trunk/] [gnu-old/] [gcc-4.2.2/] [gcc/] [predict.c] - Blame information for rev 816

Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Branch prediction routines for the GNU compiler.
2
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007
3
   Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 3, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
/* References:
22
 
23
   [1] "Branch Prediction for Free"
24
       Ball and Larus; PLDI '93.
25
   [2] "Static Branch Frequency and Program Profile Analysis"
26
       Wu and Larus; MICRO-27.
27
   [3] "Corpus-based Static Branch Prediction"
28
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */
29
 
30
 
31
#include "config.h"
32
#include "system.h"
33
#include "coretypes.h"
34
#include "tm.h"
35
#include "tree.h"
36
#include "rtl.h"
37
#include "tm_p.h"
38
#include "hard-reg-set.h"
39
#include "basic-block.h"
40
#include "insn-config.h"
41
#include "regs.h"
42
#include "flags.h"
43
#include "output.h"
44
#include "function.h"
45
#include "except.h"
46
#include "toplev.h"
47
#include "recog.h"
48
#include "expr.h"
49
#include "predict.h"
50
#include "coverage.h"
51
#include "sreal.h"
52
#include "params.h"
53
#include "target.h"
54
#include "cfgloop.h"
55
#include "tree-flow.h"
56
#include "ggc.h"
57
#include "tree-dump.h"
58
#include "tree-pass.h"
59
#include "timevar.h"
60
#include "tree-scalar-evolution.h"
61
#include "cfgloop.h"
62
 
63
/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
64
                   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
65
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
66
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;
67
 
68
/* Random guesstimation given names.  */
69
#define PROB_VERY_UNLIKELY      (REG_BR_PROB_BASE / 100 - 1)
70
#define PROB_EVEN               (REG_BR_PROB_BASE / 2)
71
#define PROB_VERY_LIKELY        (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
72
#define PROB_ALWAYS             (REG_BR_PROB_BASE)
73
 
74
static void combine_predictions_for_insn (rtx, basic_block);
75
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
76
static void estimate_loops_at_level (struct loop *, bitmap);
77
static void propagate_freq (struct loop *, bitmap);
78
static void estimate_bb_frequencies (struct loops *);
79
static void predict_paths_leading_to (basic_block, int *, enum br_predictor, enum prediction);
80
static bool last_basic_block_p (basic_block);
81
static void compute_function_frequency (void);
82
static void choose_function_section (void);
83
static bool can_predict_insn_p (rtx);
84
 
85
/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;       /* Name used in the debugging dumps.  */
  const int hitrate;            /* Expected hitrate used by
                                   predict_insn_def call.  */
  const int flags;              /* PRED_FLAG_* bits (e.g. first-match).  */
};
95
 
96
/* Use given predictor without Dempster-Shaffer theory if it matches
97
   using first_match heuristics.  */
98
#define PRED_FLAG_FIRST_MATCH 1
99
 
100
/* Recompute hitrate in percent to our representation.  */
101
 
102
#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
103
 
104
#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
105
static const struct predictor_info predictor_info[]= {
106
#include "predict.def"
107
 
108
  /* Upper bound on predictors.  */
109
  {NULL, 0, 0}
110
};
111
#undef DEF_PREDICTOR
112
 
113
/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (basic_block bb)
{
  /* With real profile data, a block whose execution count is below
     sum_max / HOT_BB_COUNT_FRACTION is not hot.  */
  if (profile_info && flag_branch_probabilities
      && (bb->count
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  /* The estimated-frequency threshold applies in either case.  */
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}
127
 
128
/* Return true in case BB is cold and should be optimized for size.  */
129
 
130
bool
131
probably_cold_bb_p (basic_block bb)
132
{
133
  if (profile_info && flag_branch_probabilities
134
      && (bb->count
135
          < profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
136
    return true;
137
  if (bb->frequency < BB_FREQ_MAX / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
138
    return true;
139
  return false;
140
}
141
 
142
/* Return true in case BB is probably never executed.  */
bool
probably_never_executed_bb_p (basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    /* Round count / runs to the nearest integer; a result of zero means
       the block was executed less than half a time per run on average.  */
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  /* Without profile data we cannot tell; be conservative.  */
  return false;
}
150
 
151
/* Return true if the one of outgoing edges is already predicted by
   PREDICTOR.  RTL version: predictions live as REG_BR_PRED notes on the
   conditional jump ending BB.  */

bool
rtl_predicted_by_p (basic_block bb, enum br_predictor predictor)
{
  rtx note;
  /* Only real insns can carry prediction notes.  */
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        /* The note value is a CONCAT (predictor, probability); compare
           the predictor half.  */
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}
166
 
167
/* Return true if the one of outgoing edges is already predicted by
168
   PREDICTOR.  */
169
 
170
bool
171
tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
172
{
173
  struct edge_prediction *i;
174
  for (i = bb->predictions; i; i = i->ep_next)
175
    if (i->ep_predictor == predictor)
176
      return true;
177
  return false;
178
}
179
 
180
/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (ie.
   taken/not taken), that is predicted right slightly over 75% of time.
   It is however notoriously poor on predicting the probability itself.
   In general the profile appears a lot flatter (with probabilities closer
   to 50%) than the reality so it is bad idea to use it to drive optimization
   such as those disabling dynamic branch prediction for well predictable
   branches.

   There are two exceptions - edges leading to noreturn edges and edges
   predicted by number of iterations heuristics are predicted well.  This macro
   should be able to distinguish those, but at the moment it simply checks for
   noreturn heuristic that is the only one giving probability over 99% or below
   1%.  In future we might want to propagate reliability information across the
   CFG if we find this information useful on multiple places.   */
static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
          || (profile_status == PROFILE_GUESSED
              && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
203
 
204
/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (edge e)
{
  return probability_reliable_p (e->probability);
}
210
 
211
/* Same predicate as edge_probability_reliable_p, working on notes.
   NOTE must be a REG_BR_PROB note; its value is the branch probability.  */
bool
br_prob_note_reliable_p (rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}
218
 
219
/* Record that PREDICTOR predicts conditional jump INSN with the given
   PROBABILITY (scaled by REG_BR_PROB_BASE) by attaching a REG_BR_PRED
   note.  No-op when branch probability guessing is disabled.  */
static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  /* Prepend a (predictor, probability) CONCAT to the insn's note list;
     combine_predictions_for_insn later folds these together.  */
  REG_NOTES (insn)
    = gen_rtx_EXPR_LIST (REG_BR_PRED,
                         gen_rtx_CONCAT (VOIDmode,
                                         GEN_INT ((int) predictor),
                                         GEN_INT ((int) probability)),
                         REG_NOTES (insn));
}
233
 
234
/* Predict insn by given predictor.  */
235
 
236
void
237
predict_insn_def (rtx insn, enum br_predictor predictor,
238
                  enum prediction taken)
239
{
240
   int probability = predictor_info[(int) predictor].hitrate;
241
 
242
   if (taken != TAKEN)
243
     probability = REG_BR_PROB_BASE - probability;
244
 
245
   predict_insn (insn, predictor, probability);
246
}
247
 
248
/* Predict edge E with given probability if possible.  The prediction is
   recorded on the conditional jump ending E->src; non-conditional jumps
   cannot carry predictions and are silently ignored.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  If E is the fallthru
     edge, the stored probability is the complement.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}
267
 
268
/* Predict edge E with the given PROBABILITY (scaled by REG_BR_PROB_BASE).
   Tree version: the prediction is pushed onto E->src's prediction list.
   Predictions must be made before probabilities are guessed.  */
void
tree_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  /* Only record predictions on real branches (more than one successor)
     and only when guessing is enabled.  */
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      /* GC-allocated; the list is dropped wholesale later, so no
         explicit free is needed.  */
      struct edge_prediction *i = ggc_alloc (sizeof (struct edge_prediction));

      i->ep_next = e->src->predictions;
      e->src->predictions = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}
285
 
286
/* Remove all predictions on given basic block that are attached
   to edge E.  Uses a pointer-to-pointer walk so matching entries can
   be unlinked in place; memory is GC-managed, so nothing is freed.  */
void
remove_predictions_associated_with_edge (edge e)
{
  if (e->src->predictions)
    {
      struct edge_prediction **prediction = &e->src->predictions;
      while (*prediction)
        {
          if ((*prediction)->ep_edge == e)
            /* Unlink; deliberately do NOT advance, the next node has
               slid into *prediction and must be examined too.  */
            *prediction = (*prediction)->ep_next;
          else
            prediction = &((*prediction)->ep_next);
        }
    }
}
303
 
304
/* Return true when we can store prediction on insn INSN.
305
   At the moment we represent predictions only on conditional
306
   jumps, not at computed jump or other complicated cases.  */
307
static bool
308
can_predict_insn_p (rtx insn)
309
{
310
  return (JUMP_P (insn)
311
          && any_condjump_p (insn)
312
          && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
313
}
314
 
315
/* Predict edge E by given predictor if possible.  */
316
 
317
void
318
predict_edge_def (edge e, enum br_predictor predictor,
319
                  enum prediction taken)
320
{
321
   int probability = predictor_info[(int) predictor].hitrate;
322
 
323
   if (taken != TAKEN)
324
     probability = REG_BR_PROB_BASE - probability;
325
 
326
   predict_edge (e, predictor, probability);
327
}
328
 
329
/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      /* REG_BR_PROB holds the probability directly.  */
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      /* REG_BR_PRED holds a (predictor, probability) CONCAT; only the
         probability half changes.  */
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}
344
 
345
/* Dump information about the branch prediction to the output file.
   FILE may be NULL (nothing is printed).  PREDICTOR and PROBABILITY
   describe the heuristic; BB is the block predicted; USED is false when
   the heuristic was computed but overridden by another one.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
                 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  /* Find the non-fallthru successor, i.e. the "taken" branch edge.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  /* With profile data, also show the measured execution and hit counts.  */
  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (file, " hit ");
          fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (file, "\n");
}
379
 
380
/* We can not predict the probabilities of outgoing edges of bb.  Set them
381
   evenly and hope for the best.  */
382
static void
383
set_even_probabilities (basic_block bb)
384
{
385
  int nedges = 0;
386
  edge e;
387
  edge_iterator ei;
388
 
389
  FOR_EACH_EDGE (e, ei, bb->succs)
390
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
391
      nedges ++;
392
  FOR_EACH_EDGE (e, ei, bb->succs)
393
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
394
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
395
    else
396
      e->probability = 0;
397
}
398
 
399
/* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
   note if not already present.  Remove now useless REG_BR_PRED notes.
   Combination uses Dempster-Shafer evidence theory unless the winning
   predictor requests first-match semantics.  The final probability is
   also mirrored onto BB's outgoing edges.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  /* If the insn cannot carry predictions, fall back to even odds.  */
  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        int predictor = INTVAL (XEXP (XEXP (note, 0), 0));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        /* Smaller predictor index wins the first-match candidate slot.  */
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;

        /* Dempster-Shafer normalization term: p*q + (1-p)*(1-q),
           in REG_BR_PROB_BASE fixed point.  */
        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
      }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shaffer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
                     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
                       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
                       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  /* Strip all the now-consumed REG_BR_PRED notes, dumping each as we go.  */
  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          int predictor = INTVAL (XEXP (XEXP (*pnote, 0), 0));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_BR_PROB,
                             GEN_INT (combined_probability), REG_NOTES (insn));

      /* Save the prediction into CFG in case we are seeing non-degenerated
         conditional jump.  */
      if (!single_succ_p (bb))
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
  else if (!single_succ_p (bb))
    {
      /* A pre-existing REG_BR_PROB note takes precedence; propagate it
         to the edges instead of our combined value.  */
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}
514
 
515
/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  Tree-level counterpart of
   combine_predictions_for_insn: works from BB's edge_prediction list
   rather than RTL notes.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  int best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;

  /* Count real (non-EH, non-fake) successors and remember the first
     two; all probabilities are expressed relative to FIRST.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
        nedges ++;
        if (first && !second)
          second = e;
        if (!first)
          first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
        set_even_probabilities (bb);
      bb->predictions = NULL;
      if (dump_file)
        fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
                 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      /* Normalize: probabilities are stored per-edge; express them as
         the probability of taking FIRST.  */
      if (pred->ep_edge != first)
        probability = REG_BR_PROB_BASE - probability;

      found = true;
      if (best_predictor > predictor)
        best_probability = probability, best_predictor = predictor;

      /* Dempster-Shafer normalization term: p*q + (1-p)*(1-q).  */
      d = (combined_probability * probability
           + (REG_BR_PROB_BASE - combined_probability)
           * (REG_BR_PROB_BASE - probability));

      /* Use FP math to avoid overflows of 32bit integers.  */
      if (d == 0)
        /* If one probability is 0% and one 100%, avoid division by zero.  */
        combined_probability = REG_BR_PROB_BASE / 2;
      else
        combined_probability = (((double) combined_probability) * probability
                                * REG_BR_PROB_BASE / d + 0.5);
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shaffer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
                       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
                       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  /* Dump every individual prediction, then drop the whole list (GC
     memory, so no explicit freeing).  */
  for (pred = bb->predictions; pred; pred = pred->ep_next)
    {
      int predictor = pred->ep_predictor;
      int probability = pred->ep_probability;

      if (pred->ep_edge != EDGE_SUCC (bb, 0))
        probability = REG_BR_PROB_BASE - probability;
      dump_prediction (dump_file, predictor, probability, bb,
                       !first_match || best_predictor == predictor);
    }
  bb->predictions = NULL;

  /* Only install guessed probabilities when no real profile count is
     available for this block.  */
  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}
628
 
629
/* Predict edge probabilities by exploiting loop structure.
   When RTLSIMPLELOOPS is set, attempt to count number of iterations by analyzing
   RTL otherwise use tree based approach.  Applies three heuristics per
   loop: iteration-count (exact when analyzable), loop-branch (latch back
   edge taken), and loop-exit (exit edges not taken).  */
static void
predict_loops (struct loops *loops_info, bool rtlsimpleloops)
{
  unsigned i;

  /* The tree-level path needs scalar evolution analysis.  */
  if (!rtlsimpleloops)
    scev_initialize (loops_info);

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  for (i = 1; i < loops_info->num; i++)
    {
      basic_block bb, *bbs;
      unsigned j;
      unsigned n_exits;
      struct loop *loop = loops_info->parray[i];
      struct niter_desc desc;
      unsigned HOST_WIDE_INT niter;
      edge *exits;

      exits = get_loop_exit_edges (loop, &n_exits);

      if (rtlsimpleloops)
        {
          /* RTL path: use induction-variable analysis to find a simple
             exit with a constant iteration count.  */
          iv_analysis_loop_init (loop);
          find_simple_exit (loop, &desc);

          if (desc.simple_p && desc.const_iter)
            {
              int prob;
              niter = desc.niter + 1;
              if (niter == 0)        /* We might overflow here.  */
                niter = desc.niter;
              /* Cap to keep the predicted probability meaningful.  */
              if (niter
                  > (unsigned int) PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS))
                niter = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);

              /* Exit is taken roughly once per NITER iterations.  */
              prob = (REG_BR_PROB_BASE
                      - (REG_BR_PROB_BASE + niter /2) / niter);
              /* Branch prediction algorithm gives 0 frequency for everything
                 after the end of loop for loop having 0 probability to finish.  */
              if (prob == REG_BR_PROB_BASE)
                prob = REG_BR_PROB_BASE - 1;
              predict_edge (desc.in_edge, PRED_LOOP_ITERATIONS,
                            prob);
            }
        }
      else
        {
          /* Tree path: try each exit edge, first via the niter analysis
             and then by brute-force evaluation.  */
          struct tree_niter_desc niter_desc;

          for (j = 0; j < n_exits; j++)
            {
              tree niter = NULL;

              if (number_of_iterations_exit (loop, exits[j], &niter_desc, false))
                niter = niter_desc.niter;
              if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
                niter = loop_niter_by_eval (loop, exits[j]);

              if (TREE_CODE (niter) == INTEGER_CST)
                {
                  int probability;
                  int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
                  if (host_integerp (niter, 1)
                      && tree_int_cst_lt (niter,
                                          build_int_cstu (NULL_TREE, max - 1)))
                    {
                      HOST_WIDE_INT nitercst = tree_low_cst (niter, 1) + 1;
                      probability = ((REG_BR_PROB_BASE + nitercst / 2)
                                     / nitercst);
                    }
                  else
                    probability = ((REG_BR_PROB_BASE + max / 2) / max);

                  predict_edge (exits[j], PRED_LOOP_ITERATIONS, probability);
                }
            }

        }
      free (exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;
          edge_iterator ei;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statement.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better to be handled
             separately.  */
          if ((rtlsimpleloops && !can_predict_insn_p (BB_END (bb)))
              || predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          if (bb == loop->latch)
            {
              e = find_edge (loop->latch, loop->header);
              if (e)
                {
                  header_found = 1;
                  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
                }
            }

          /* Loop exit heuristics - predict an edge exiting the loop if the
             conditional has no loop header successors as not taken.  */
          if (!header_found)
            {
              /* For loop with many exits we don't want to predict all exits
                 with the pretty large probability, because if all exits are
                 considered in row, the loop would be predicted to iterate
                 almost never.  The code to divide probability by number of
                 exits is very rough.  It should compute the number of exits
                 taken in each patch through function (not the overall number
                 of exits that might be a lot higher for loops with wide switch
                 statements in them) and compute n-th square root.

                 We limit the minimal probability by 2% to avoid
                 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
                 as this was causing regression in perl benchmark containing such
                 a wide loop.  */

              int probability = ((REG_BR_PROB_BASE
                                  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
                                 / n_exits);
              if (probability < HITRATE (2))
                probability = HITRATE (2);
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest->index < NUM_FIXED_BLOCKS
                    || !flow_bb_inside_loop_p (loop, e->dest))
                  predict_edge (e, PRED_LOOP_EXIT, probability);
            }
        }

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }

  if (!rtlsimpleloops)
    {
      scev_finalize ();
      current_loops = NULL;
    }
}
784
 
785
/* Attempt to predict probabilities of BB outgoing edges using local
   properties: the "pointer heuristic" (pointer comparisons against NULL
   or another pointer are usually false) and the "opcode heuristic"
   (EQ usually false, NE usually true, signed comparisons against 0/±1
   usually positive).  See Ball & Larus, PLDI '93.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
        predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true. Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
        /* Unconditional branch.  */
        predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                          cond == const0_rtx ? NOT_TAKEN : TAKEN);
        break;

      case EQ:
      case UNEQ:
        /* Floating point comparisons appears to behave in a very
           unpredictable way because of special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE:
      case LTGT:
        /* Floating point comparisons appears to behave in a very
           unpredictable way because of special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED:
        /* Operands are usually ordered (not NaN), so predict taken.  */
        predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
        break;

      case UNORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
        break;

      case LE:
      case LT:
        /* x <= 0, x < 1, x <= -1: quantities are usually positive,
           so predict not taken.  */
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE:
      case GT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}
883
 
884
/* Set edge->probability for each successor edge of BB.
   Runs the local heuristics, then folds the resulting REG_BR_PRED notes
   into a single REG_BR_PROB and edge probabilities.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}
891
 
892
/* Return constant EXPR will likely have at execution time, NULL if unknown.
893
   The function is used by builtin_expect branch predictor so the evidence
894
   must come from this construct and additional possible constant folding.
895
 
896
   We may want to implement more involved value guess (such as value range
897
   propagation based prediction), but such tricks shall go to new
898
   implementation.  */
899
 
900
static tree
901
expr_expected_value (tree expr, bitmap visited)
902
{
903
  if (TREE_CONSTANT (expr))
904
    return expr;
905
  else if (TREE_CODE (expr) == SSA_NAME)
906
    {
907
      tree def = SSA_NAME_DEF_STMT (expr);
908
 
909
      /* If we were already here, break the infinite cycle.  */
910
      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
911
        return NULL;
912
      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));
913
 
914
      if (TREE_CODE (def) == PHI_NODE)
915
        {
916
          /* All the arguments of the PHI node must have the same constant
917
             length.  */
918
          int i;
919
          tree val = NULL, new_val;
920
 
921
          for (i = 0; i < PHI_NUM_ARGS (def); i++)
922
            {
923
              tree arg = PHI_ARG_DEF (def, i);
924
 
925
              /* If this PHI has itself as an argument, we cannot
926
                 determine the string length of this argument.  However,
927
                 if we can find an expected constant value for the other
928
                 PHI args then we can still be sure that this is
929
                 likely a constant.  So be optimistic and just
930
                 continue with the next argument.  */
931
              if (arg == PHI_RESULT (def))
932
                continue;
933
 
934
              new_val = expr_expected_value (arg, visited);
935
              if (!new_val)
936
                return NULL;
937
              if (!val)
938
                val = new_val;
939
              else if (!operand_equal_p (val, new_val, false))
940
                return NULL;
941
            }
942
          return val;
943
        }
944
      if (TREE_CODE (def) != MODIFY_EXPR || TREE_OPERAND (def, 0) != expr)
945
        return NULL;
946
      return expr_expected_value (TREE_OPERAND (def, 1), visited);
947
    }
948
  else if (TREE_CODE (expr) == CALL_EXPR)
949
    {
950
      tree decl = get_callee_fndecl (expr);
951
      if (!decl)
952
        return NULL;
953
      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
954
          && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
955
        {
956
          tree arglist = TREE_OPERAND (expr, 1);
957
          tree val;
958
 
959
          if (arglist == NULL_TREE
960
              || TREE_CHAIN (arglist) == NULL_TREE)
961
            return NULL;
962
          val = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
963
          if (TREE_CONSTANT (val))
964
            return val;
965
          return TREE_VALUE (TREE_CHAIN (TREE_OPERAND (expr, 1)));
966
        }
967
    }
968
  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
969
    {
970
      tree op0, op1, res;
971
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
972
      if (!op0)
973
        return NULL;
974
      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
975
      if (!op1)
976
        return NULL;
977
      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
978
      if (TREE_CONSTANT (res))
979
        return res;
980
      return NULL;
981
    }
982
  if (UNARY_CLASS_P (expr))
983
    {
984
      tree op0, res;
985
      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
986
      if (!op0)
987
        return NULL;
988
      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
989
      if (TREE_CONSTANT (res))
990
        return res;
991
      return NULL;
992
    }
993
  return NULL;
994
}
995
 
996
/* Get rid of all builtin_expect calls we no longer need.  */
static void
strip_builtin_expect (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bi;
      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
        {
          tree stmt = bsi_stmt (bi);
          tree fndecl;
          tree arglist;

          /* Match LHS = __builtin_expect (ARG0, ARG1) with both
             arguments present.  The hint has already been consumed by
             the predictors, so rewrite it to the plain LHS = ARG0.
             Note the assignments to FNDECL and ARGLIST inside the
             condition chain feed the later conjuncts.  */
          if (TREE_CODE (stmt) == MODIFY_EXPR
              && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR
              && (fndecl = get_callee_fndecl (TREE_OPERAND (stmt, 1)))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
              && (arglist = TREE_OPERAND (TREE_OPERAND (stmt, 1), 1))
              && TREE_CHAIN (arglist))
            {
              TREE_OPERAND (stmt, 1) = TREE_VALUE (arglist);
              update_stmt (stmt);
            }
        }
    }
}
1024
 
1025
/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  tree stmt = last_stmt (bb);
  edge then_edge;
  tree cond;
  tree op0;
  tree type;
  tree val;
  bitmap visited;
  edge_iterator ei;

  /* Only conditional jumps are interesting.  */
  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
    return;
  /* Find the edge taken when the condition is true.  */
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  cond = TREE_OPERAND (stmt, 0);
  if (!COMPARISON_CLASS_P (cond))
    return;
  op0 = TREE_OPERAND (cond, 0);
  type = TREE_TYPE (op0);
  /* First see whether __builtin_expect (or constant folding) tells us
     the expected truth value of the whole condition; that overrides the
     opcode heuristics below.  */
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value (cond, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (TREE_CODE (cond) == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (TREE_CODE (cond) == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true. Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (TREE_CODE (cond))
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        /* Floating point comparisons appears to behave in a very
           unpredictable way because of special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE_EXPR:
      case LTGT_EXPR:
        /* Floating point comparisons appears to behave in a very
           unpredictable way because of special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (TREE_OPERAND (cond, 1)))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
        break;

      case UNORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
        break;

      /* Comparisons against 0/1/-1 are assumed to test for positive
         quantities, which are common, so x <= 0 etc. is unlikely.  */
      case LE_EXPR:
      case LT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE_EXPR:
      case GT_EXPR:
        if (integer_zerop (TREE_OPERAND (cond, 1))
            || integer_onep (TREE_OPERAND (cond, 1))
            || integer_all_onesp (TREE_OPERAND (cond, 1))
            || real_zerop (TREE_OPERAND (cond, 1))
            || real_onep (TREE_OPERAND (cond, 1))
            || real_minus_onep (TREE_OPERAND (cond, 1)))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}
1143
 
1144
/* Try to guess whether the value of return means error code.  */
1145
static enum br_predictor
1146
return_prediction (tree val, enum prediction *prediction)
1147
{
1148
  /* VOID.  */
1149
  if (!val)
1150
    return PRED_NO_PREDICTION;
1151
  /* Different heuristics for pointers and scalars.  */
1152
  if (POINTER_TYPE_P (TREE_TYPE (val)))
1153
    {
1154
      /* NULL is usually not returned.  */
1155
      if (integer_zerop (val))
1156
        {
1157
          *prediction = NOT_TAKEN;
1158
          return PRED_NULL_RETURN;
1159
        }
1160
    }
1161
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
1162
    {
1163
      /* Negative return values are often used to indicate
1164
         errors.  */
1165
      if (TREE_CODE (val) == INTEGER_CST
1166
          && tree_int_cst_sgn (val) < 0)
1167
        {
1168
          *prediction = NOT_TAKEN;
1169
          return PRED_NEGATIVE_RETURN;
1170
        }
1171
      /* Constant return values seems to be commonly taken.
1172
         Zero/one often represent booleans so exclude them from the
1173
         heuristics.  */
1174
      if (TREE_CONSTANT (val)
1175
          && (!integer_zerop (val) && !integer_onep (val)))
1176
        {
1177
          *prediction = TAKEN;
1178
          return PRED_NEGATIVE_RETURN;
1179
        }
1180
    }
1181
  return PRED_NO_PREDICTION;
1182
}
1183
 
1184
/* Find the basic block with return expression and look up for possible
   return value trying to apply RETURN_PREDICTION heuristics.
   HEADS is the dominator-cache array shared with
   predict_paths_leading_to.  */
static void
apply_return_prediction (int *heads)
{
  tree return_stmt = NULL;
  tree return_val;
  edge e;
  tree phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  /* Locate a predecessor of the exit block ending in a RETURN_EXPR.
     FOR_EACH_EDGE leaves E as NULL when no edge matched.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (TREE_CODE (return_stmt) == RETURN_EXPR)
        break;
    }
  if (!e)
    return;
  return_val = TREE_OPERAND (return_stmt, 0);
  if (!return_val)
    return;
  /* A return of the form "return (x = expr)" carries the value on the
     RHS of the embedded assignment.  */
  if (TREE_CODE (return_val) == MODIFY_EXPR)
    return_val = TREE_OPERAND (return_val, 1);
  /* Only a PHI-defined SSA return value lets us predict per-path.  */
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
    return;
  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
    if (PHI_RESULT (phi) == return_val)
      break;
  if (!phi)
    return;
  phi_num_args = PHI_NUM_ARGS (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (ie they are all positive constants)
     so we can hardly say something about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  /* Only if the predictors disagree do we predict each incoming path
     according to its own return value.  */
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
        pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
        if (pred != PRED_NO_PREDICTION)
          predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, heads, pred,
                                    direction);
      }
}
1238
 
1239
/* Look for basic block that contains unlikely to happen events
1240
   (such as noreturn calls) and mark all paths leading to execution
1241
   of this basic blocks as unlikely.  */
1242
 
1243
static void
1244
tree_bb_level_predictions (void)
1245
{
1246
  basic_block bb;
1247
  int *heads;
1248
 
1249
  heads = XNEWVEC (int, last_basic_block);
1250
  memset (heads, ENTRY_BLOCK, sizeof (int) * last_basic_block);
1251
  heads[ENTRY_BLOCK_PTR->next_bb->index] = last_basic_block;
1252
 
1253
  apply_return_prediction (heads);
1254
 
1255
  FOR_EACH_BB (bb)
1256
    {
1257
      block_stmt_iterator bsi = bsi_last (bb);
1258
 
1259
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1260
        {
1261
          tree stmt = bsi_stmt (bsi);
1262
          switch (TREE_CODE (stmt))
1263
            {
1264
              case MODIFY_EXPR:
1265
                if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
1266
                  {
1267
                    stmt = TREE_OPERAND (stmt, 1);
1268
                    goto call_expr;
1269
                  }
1270
                break;
1271
              case CALL_EXPR:
1272
call_expr:;
1273
                if (call_expr_flags (stmt) & ECF_NORETURN)
1274
                  predict_paths_leading_to (bb, heads, PRED_NORETURN,
1275
                                            NOT_TAKEN);
1276
                break;
1277
              default:
1278
                break;
1279
            }
1280
        }
1281
    }
1282
 
1283
  free (heads);
1284
}
1285
 
1286
/* Predict branch probabilities and estimate profile of the tree CFG.
   Pass entry point; always returns 0 (no TODO flags).  */
static unsigned int
tree_estimate_probability (void)
{
  basic_block bb;
  struct loops loops_info;

  flow_loops_find (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (&loops_info, dump_file, NULL, 0);

  /* Fake edges and dominators are needed by the path-based predictors
     below; they are torn down again before returning.  */
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  tree_bb_level_predictions ();

  mark_irreducible_loops (&loops_info);
  predict_loops (&loops_info, false);

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          /* Predict early returns to be probable, as we've already taken
             care for error returns and other cases are often used for
             fast paths through function.  */
          if (e->dest == EXIT_BLOCK_PTR
              && TREE_CODE (last_stmt (bb)) == RETURN_EXPR
              && !single_pred_p (bb))
            {
              edge e1;
              edge_iterator ei1;

              FOR_EACH_EDGE (e1, ei1, bb->preds)
                if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
                    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
                    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN)
                    && !last_basic_block_p (e1->src))
                  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
            }

          /* Look for block we are guarding (ie we dominate it,
             but it doesn't postdominate us).  */
          if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
              block_stmt_iterator bi;

              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
              for (bi = bsi_start (e->dest); !bsi_end_p (bi);
                   bsi_next (&bi))
                {
                  tree stmt = bsi_stmt (bi);
                  if ((TREE_CODE (stmt) == CALL_EXPR
                       || (TREE_CODE (stmt) == MODIFY_EXPR
                           && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
                      /* Constant and pure calls are hardly used to signalize
                         something exceptional.  */
                      && TREE_SIDE_EFFECTS (stmt))
                    {
                      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                      break;
                    }
                }
            }
        }
      tree_predict_by_opcode (bb);
    }
  /* Fold the accumulated predictions into one probability per edge.  */
  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

  strip_builtin_expect ();
  estimate_bb_frequencies (&loops_info);
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
  flow_loops_free (&loops_info);
  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_tree_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}
1377
 
1378
/* __builtin_expect dropped tokens into the insn stream describing expected
   values of registers.  Generate branch probabilities based off these
   values.  */

void
expected_value_to_br_prob (void)
{
  /* EV is the live expected-value note (NULL when none applies);
     EV_REG is the register that note describes.  */
  rtx insn, cond, ev = NULL_RTX, ev_reg = NULL_RTX;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case NOTE:
          /* Look for expected value notes.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EXPECTED_VALUE)
            {
              ev = NOTE_EXPECTED_VALUE (insn);
              ev_reg = XEXP (ev, 0);
              delete_insn (insn);
            }
          continue;

        case CODE_LABEL:
          /* Never propagate across labels.  */
          ev = NULL_RTX;
          continue;

        case JUMP_INSN:
          /* Look for simple conditional branches.  If we haven't got an
             expected value yet, no point going further.  */
          if (!JUMP_P (insn) || ev == NULL_RTX
              || ! any_condjump_p (insn))
            continue;
          break;

        default:
          /* Look for insns that clobber the EV register.  */
          if (ev && reg_set_p (ev_reg, insn))
            ev = NULL_RTX;
          continue;
        }

      /* Collect the branch condition, hopefully relative to EV_REG.  */
      /* ???  At present we'll miss things like
                (expected_value (eq r70 0))
                (set r71 -1)
                (set r80 (lt r70 r71))
                (set pc (if_then_else (ne r80 0) ...))
         as canonicalize_condition will render this to us as
                (lt r70, r71)
         Could use cselib to try and reduce this further.  */
      cond = XEXP (SET_SRC (pc_set (insn)), 0);
      cond = canonicalize_condition (insn, cond, 0, NULL, ev_reg,
                                     false, false);
      if (! cond || XEXP (cond, 0) != ev_reg
          || GET_CODE (XEXP (cond, 1)) != CONST_INT)
        continue;

      /* Substitute and simplify.  Given that the expression we're
         building involves two constants, we should wind up with either
         true or false.  */
      cond = gen_rtx_fmt_ee (GET_CODE (cond), VOIDmode,
                             XEXP (ev, 1), XEXP (cond, 1));
      cond = simplify_rtx (cond);

      /* Turn the condition into a scaled branch probability.  */
      gcc_assert (cond == const_true_rtx || cond == const0_rtx);
      predict_insn_def (insn, PRED_BUILTIN_EXPECT,
                        cond == const_true_rtx ? TAKEN : NOT_TAKEN);
    }
}
1450
 
1451
/* Check whether this is the last basic block of function.  Commonly
1452
   there is one extra common cleanup block.  */
1453
static bool
1454
last_basic_block_p (basic_block bb)
1455
{
1456
  if (bb == EXIT_BLOCK_PTR)
1457
    return false;
1458
 
1459
  return (bb->next_bb == EXIT_BLOCK_PTR
1460
          || (bb->next_bb->next_bb == EXIT_BLOCK_PTR
1461
              && single_succ_p (bb)
1462
              && single_succ (bb)->next_bb == EXIT_BLOCK_PTR));
1463
}
1464
 
1465
/* Sets branch probabilities according to PREDiction and
   FLAGS. HEADS[bb->index] should be index of basic block in that we
   need to alter branch predictions (i.e. the first of our dominators
   such that we do not post-dominate it) (but we fill this information
   on demand, so -1 may be there in case this was not needed yet).  */

static void
predict_paths_leading_to (basic_block bb, int *heads, enum br_predictor pred,
                          enum prediction taken)
{
  edge e;
  edge_iterator ei;
  int y;

  if (heads[bb->index] == ENTRY_BLOCK)
    {
      /* This is first time we need this field in heads array; so
         find first dominator that we do not post-dominate (we are
         using already known members of heads array).  */
      basic_block ai = bb;
      basic_block next_ai = get_immediate_dominator (CDI_DOMINATORS, bb);
      int head;

      /* Walk up the dominator tree, chaining visited blocks via HEADS,
         until a block is found that BB does not post-dominate or whose
         head is already cached.  */
      while (heads[next_ai->index] == ENTRY_BLOCK)
        {
          if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
            break;
          heads[next_ai->index] = ai->index;
          ai = next_ai;
          next_ai = get_immediate_dominator (CDI_DOMINATORS, next_ai);
        }
      if (!dominated_by_p (CDI_POST_DOMINATORS, next_ai, bb))
        head = next_ai->index;
      else
        head = heads[next_ai->index];
      /* Path-compress: rewrite every block on the walked chain to point
         directly at the discovered head.  */
      while (next_ai != bb)
        {
          next_ai = ai;
          ai = BASIC_BLOCK (heads[ai->index]);
          heads[next_ai->index] = head;
        }
    }
  y = heads[bb->index];

  /* Now find the edge that leads to our branch and apply the prediction.  */

  /* last_basic_block is the sentinel for "no branch to predict".  */
  if (y == last_basic_block)
    return;
  FOR_EACH_EDGE (e, ei, BASIC_BLOCK (y)->succs)
    if (e->dest->index >= NUM_FIXED_BLOCKS
        && dominated_by_p (CDI_POST_DOMINATORS, e->dest, bb))
      predict_edge_def (e, pred, taken);
}
1518
 
1519
/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case edge is a loopback edge, the probability edge will be reached
     in case header is.  Estimated number of iterations of the loop can be
     then computed as 1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

/* Accessors for the per-block/per-edge data stashed in the AUX fields.  */
#define BLOCK_INFO(B)   ((block_info) (B)->aux)
#define EDGE_INFO(E)    ((edge_info) (E)->aux)
1547
 
1548
/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies for LOOP.  TOVISIT is the set of block
   indices still awaiting a frequency; blocks are processed in
   topological order via a worklist threaded through BLOCK_INFO->next.  */

static void
propagate_freq (struct loop *loop, bitmap tovisit)
{
  basic_block head = loop->header;
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit count number of his predecessors
     we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

       /* The outermost "loop" includes the exit block, which we can not
          look up via BASIC_BLOCK.  Detect this and use EXIT_BLOCK_PTR
          directly.  Do the same for the entry block.  */
      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          bool visit = bitmap_bit_p (tovisit, e->src->index);

          if (visit && !(e->flags & EDGE_DFS_BACK))
            count++;
          else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
            fprintf (dump_file,
                     "Irreducible region hit, ignoring edge to %i->%i\n",
                     e->src->index, bb->index);
        }
      BLOCK_INFO (bb)->npredecessors = count;
    }

  /* The loop header is the reference point: frequency 1.  */
  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          /* All forward predecessors must already have been visited.  */
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
                        || (e->flags & EDGE_DFS_BACK));
#endif

          FOR_EACH_EDGE (e, ei, bb->preds)
            if (EDGE_INFO (e)->back_edge)
              {
                sreal_add (&cyclic_probability, &cyclic_probability,
                           &EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                sreal tmp;

                /*  frequency += (e->probability
                                  * BLOCK_INFO (e->src)->frequency /
                                  REG_BR_PROB_BASE);  */

                sreal_init (&tmp, e->probability, 0);
                sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
                sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
                sreal_add (&frequency, &frequency, &tmp);
              }

          if (sreal_compare (&cyclic_probability, &real_zero) == 0)
            {
              memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                      sizeof (frequency));
            }
          else
            {
              /* Clamp so the division below stays finite.  */
              if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
                {
                  memcpy (&cyclic_probability, &real_almost_one,
                          sizeof (real_almost_one));
                }

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
              sreal_div (&BLOCK_INFO (bb)->frequency,
                         &frequency, &cyclic_probability);
            }
        }

      bitmap_clear_bit (tovisit, bb->index);

      /* Record the probability of the latch edge back to HEAD, used by
         the enclosing loop's propagation.  */
      e = find_edge (bb, head);
      if (e)
        {
          sreal tmp;

          /* EDGE_INFO (e)->back_edge_prob
             = ((e->probability * BLOCK_INFO (bb)->frequency)
             / REG_BR_PROB_BASE); */

          sreal_init (&tmp, e->probability, 0);
          sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
          sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                     &tmp, &real_inv_br_prob_base);
        }

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            /* Enqueue a successor once all its forward predecessors are
               done.  */
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}
1687
 
1688
/* Estimate probabilities of loopback edges in loops at same nest level.
   Recurses into inner loops first so that back-edge probabilities are
   known before the enclosing loop is propagated.  */

static void
estimate_loops_at_level (struct loop *first_loop, bitmap tovisit)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;

      estimate_loops_at_level (loop->inner, tovisit);

      /* Do not do this for dummy function loop.  */
      if (EDGE_COUNT (loop->latch->succs) > 0)
        {
          /* Find current loop back edge and mark it.  */
          e = loop_latch_edge (loop);
          EDGE_INFO (e)->back_edge = 1;
       }

      /* Queue every block of this loop, then propagate frequencies
         through it.  */
      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop, tovisit);
    }
}
1718
 
1719
/* Convert counts measured by profile driven feedback to frequencies.
1720
   Return nonzero iff there was any nonzero execution count.  */
1721
 
1722
int
1723
counts_to_freqs (void)
1724
{
1725
  gcov_type count_max, true_count_max = 0;
1726
  basic_block bb;
1727
 
1728
  FOR_EACH_BB (bb)
1729
    true_count_max = MAX (bb->count, true_count_max);
1730
 
1731
  count_max = MAX (true_count_max, 1);
1732
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1733
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
1734
  return true_count_max;
1735
}
1736
 
1737
/* Return true if function is likely to be expensive, so there is no point to
1738
   optimize performance of prologue, epilogue or do inlining at the expense
1739
   of code size growth.  THRESHOLD is the limit of number of instructions
1740
   function can execute at average to be still considered not expensive.  */
1741
 
1742
bool
1743
expensive_function_p (int threshold)
1744
{
1745
  unsigned int sum = 0;
1746
  basic_block bb;
1747
  unsigned int limit;
1748
 
1749
  /* We can not compute accurately for large thresholds due to scaled
1750
     frequencies.  */
1751
  gcc_assert (threshold <= BB_FREQ_MAX);
1752
 
1753
  /* Frequencies are out of range.  This either means that function contains
1754
     internal loop executing more than BB_FREQ_MAX times or profile feedback
1755
     is available and function has not been executed at all.  */
1756
  if (ENTRY_BLOCK_PTR->frequency == 0)
1757
    return true;
1758
 
1759
  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
1760
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
1761
  FOR_EACH_BB (bb)
1762
    {
1763
      rtx insn;
1764
 
1765
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
1766
           insn = NEXT_INSN (insn))
1767
        if (active_insn_p (insn))
1768
          {
1769
            sum += bb->frequency;
1770
            if (sum > limit)
1771
              return true;
1772
        }
1773
    }
1774
 
1775
  return false;
1776
}
1777
 
1778
/* Estimate basic blocks frequency by given branch probabilities.
   LOOPS is the loop tree of the current function; inner loops are
   handled before their enclosing loops so that back edge
   probabilities can be accounted for.  Real profile feedback, when
   available and nonzero, is used directly instead of the estimate.  */

static void
estimate_bb_frequencies (struct loops *loops)
{
  basic_block bb;
  sreal freq_max;

  /* If profile feedback produced at least one nonzero count,
     counts_to_freqs has already set bb->frequency for every block and
     no static estimation is needed.  */
  if (!flag_branch_probabilities || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;
      bitmap tovisit;

      /* One-time setup of the sreal constants used by the
	 propagation below; they persist across invocations.  */
      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          sreal_init (&real_zero, 0, 0);
          sreal_init (&real_one, 1, 0);
          sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
          sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
          sreal_init (&real_one_half, 1, -1);	/* 1 * 2^-1, i.e. 0.5.  */
          sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
          sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
        }

      mark_dfs_back_edges ();

      /* The entry block reaches its single successor unconditionally.  */
      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      tovisit = BITMAP_ALLOC (NULL);
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;
          edge_iterator ei;

          /* Convert each edge probability from the REG_BR_PROB_BASE
	     fixed-point scale to an sreal fraction in [0, 1].  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
              sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                         &EDGE_INFO (e)->back_edge_prob,
                         &real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops_at_level (loops->tree_root, tovisit);

      /* Find the maximal computed frequency so everything can be
	 normalized into the [0, BB_FREQ_MAX] range.  */
      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      /* FREQ_MAX becomes the scale factor BB_FREQ_MAX / max frequency.  */
      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          sreal tmp;

	  /* Scale, then add one half to round to the nearest integer.  */
          sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
          sreal_add (&tmp, &tmp, &real_one_half);
          bb->frequency = sreal_to_int (&tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
      BITMAP_FREE (tovisit);
    }
  compute_function_frequency ();
  if (flag_reorder_functions)
    choose_function_section ();
}
1852
 
1853
/* Decide whether function is hot, cold or unlikely executed.  */
1854
static void
1855
compute_function_frequency (void)
1856
{
1857
  basic_block bb;
1858
 
1859
  if (!profile_info || !flag_branch_probabilities)
1860
    return;
1861
  cfun->function_frequency = FUNCTION_FREQUENCY_UNLIKELY_EXECUTED;
1862
  FOR_EACH_BB (bb)
1863
    {
1864
      if (maybe_hot_bb_p (bb))
1865
        {
1866
          cfun->function_frequency = FUNCTION_FREQUENCY_HOT;
1867
          return;
1868
        }
1869
      if (!probably_never_executed_bb_p (bb))
1870
        cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
1871
    }
1872
}
1873
 
1874
/* Choose appropriate section for the function.  */
1875
static void
1876
choose_function_section (void)
1877
{
1878
  if (DECL_SECTION_NAME (current_function_decl)
1879
      || !targetm.have_named_sections
1880
      /* Theoretically we can split the gnu.linkonce text section too,
1881
         but this requires more work as the frequency needs to match
1882
         for all generated objects so we need to merge the frequency
1883
         of all instances.  For now just never set frequency for these.  */
1884
      || DECL_ONE_ONLY (current_function_decl))
1885
    return;
1886
 
1887
  /* If we are doing the partitioning optimization, let the optimization
1888
     choose the correct section into which to put things.  */
1889
 
1890
  if (flag_reorder_blocks_and_partition)
1891
    return;
1892
 
1893
  if (cfun->function_frequency == FUNCTION_FREQUENCY_HOT)
1894
    DECL_SECTION_NAME (current_function_decl) =
1895
      build_string (strlen (HOT_TEXT_SECTION_NAME), HOT_TEXT_SECTION_NAME);
1896
  if (cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED)
1897
    DECL_SECTION_NAME (current_function_decl) =
1898
      build_string (strlen (UNLIKELY_EXECUTED_TEXT_SECTION_NAME),
1899
                    UNLIKELY_EXECUTED_TEXT_SECTION_NAME);
1900
}
1901
 
1902
static bool
1903
gate_estimate_probability (void)
1904
{
1905
  return flag_guess_branch_prob;
1906
}
1907
 
1908
/* Pass descriptor for the branch prediction ("profile") pass.  It is
   gated on -fguess-branch-probability and executes
   tree_estimate_probability over a function with a valid CFG.  */
struct tree_opt_pass pass_profile =
{
  "profile",                            /* name */
  gate_estimate_probability,            /* gate */
  tree_estimate_probability,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                     /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                     /* properties_provided */
  0,                                     /* properties_destroyed */
  0,                                     /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa,                   /* todo_flags_finish */

};

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.