/* Branch prediction routines for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* References:

   [1] "Branch Prediction for Free"
       Ball and Larus; PLDI '93.
   [2] "Static Branch Frequency and Program Profile Analysis"
       Wu and Larus; MICRO-27.
   [3] "Corpus-based Static Branch Prediction"
       Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "diagnostic-core.h"
#include "recog.h"
#include "expr.h"
#include "predict.h"
#include "coverage.h"
#include "sreal.h"
#include "params.h"
#include "target.h"
#include "cfgloop.h"
#include "tree-flow.h"
#include "ggc.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "tree-scalar-evolution.h"
#include "pointer-set.h"

/* real constants: 0, 1, 1-1/REG_BR_PROB_BASE, REG_BR_PROB_BASE,
                   1/REG_BR_PROB_BASE, 0.5, BB_FREQ_MAX.  */
static sreal real_zero, real_one, real_almost_one, real_br_prob_base,
             real_inv_br_prob_base, real_one_half, real_bb_freq_max;

/* Probability guesstimates given names.
   PROB_VERY_UNLIKELY should be small enough that a basic block predicted
   by it gets below HOT_BB_FREQUENCY_FRACTION.  */
#define PROB_VERY_UNLIKELY      (REG_BR_PROB_BASE / 2000 - 1)
#define PROB_EVEN               (REG_BR_PROB_BASE / 2)
#define PROB_VERY_LIKELY        (REG_BR_PROB_BASE - PROB_VERY_UNLIKELY)
#define PROB_ALWAYS             (REG_BR_PROB_BASE)
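
/* Illustrative values (not in the original source), assuming
   REG_BR_PROB_BASE is defined as 10000, its usual value: then
   PROB_VERY_UNLIKELY is 4, PROB_EVEN is 5000, PROB_VERY_LIKELY is 9996
   and PROB_ALWAYS is 10000.  */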

static void combine_predictions_for_insn (rtx, basic_block);
static void dump_prediction (FILE *, enum br_predictor, int, basic_block, int);
static void predict_paths_leading_to (basic_block, enum br_predictor, enum prediction);
static void predict_paths_leading_to_edge (edge, enum br_predictor, enum prediction);
static bool can_predict_insn_p (const_rtx);

/* Information we hold about each branch predictor.
   Filled using information from predict.def.  */

struct predictor_info
{
  const char *const name;       /* Name used in the debugging dumps.  */
  const int hitrate;            /* Expected hitrate used by
                                   predict_insn_def call.  */
  const int flags;
};

/* Use given predictor without Dempster-Shafer theory if it matches
   using first_match heuristics.  */
#define PRED_FLAG_FIRST_MATCH 1

/* Convert a hitrate in percent to our representation.  */

#define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
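
/* Worked example (illustrative): with REG_BR_PROB_BASE of 10000,
   HITRATE (50) is 5000, HITRATE (75) is 7500 and HITRATE (99) is 9900;
   the "+ 50" makes the division round to nearest instead of truncating.  */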

#define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
static const struct predictor_info predictor_info[]= {
#include "predict.def"

  /* Upper bound on predictors.  */
  {NULL, 0, 0}
};
#undef DEF_PREDICTOR

/* Return TRUE if frequency FREQ is considered to be hot.  */

static inline bool
maybe_hot_frequency_p (int freq)
{
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  if (!profile_info || !flag_branch_probabilities)
    {
      if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
        return false;
      if (node->frequency == NODE_FREQUENCY_HOT)
        return true;
    }
  if (profile_status == PROFILE_ABSENT)
    return true;
  if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && freq < (ENTRY_BLOCK_PTR->frequency * 2 / 3))
    return false;
  if (freq < ENTRY_BLOCK_PTR->frequency / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION))
    return false;
  return true;
}

/* Return TRUE if profile count COUNT is considered to be hot.  */

static inline bool
maybe_hot_count_p (gcov_type count)
{
  if (profile_status != PROFILE_READ)
    return true;
  /* Code executed at most once is not hot.  */
  if (profile_info->runs >= count)
    return false;
  return (count
          > profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION));
}
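
/* Rough numbers (illustrative, assuming the usual params.def default of
   10000 for --param hot-bb-count-fraction): a count is considered hot
   only when it exceeds one ten-thousandth of sum_max, the largest count
   observed in the profile.  */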

/* Return true in case BB can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_bb_p (const_basic_block bb)
{
  if (profile_status == PROFILE_READ)
    return maybe_hot_count_p (bb->count);
  return maybe_hot_frequency_p (bb->frequency);
}

/* Return true if the call can be hot.  */

bool
cgraph_maybe_hot_edge_p (struct cgraph_edge *edge)
{
  if (profile_info && flag_branch_probabilities
      && (edge->count
          <= profile_info->sum_max / PARAM_VALUE (HOT_BB_COUNT_FRACTION)))
    return false;
  if (edge->caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
      || edge->callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return false;
  if (edge->caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
      && edge->callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE)
    return false;
  if (optimize_size)
    return false;
  if (edge->caller->frequency == NODE_FREQUENCY_HOT)
    return true;
  if (edge->caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
      && edge->frequency < CGRAPH_FREQ_BASE * 3 / 2)
    return false;
  if (flag_guess_branch_prob
      && edge->frequency <= (CGRAPH_FREQ_BASE
                             / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
    return false;
  return true;
}

/* Return true in case edge E can be CPU intensive and should be optimized
   for maximal performance.  */

bool
maybe_hot_edge_p (edge e)
{
  if (profile_status == PROFILE_READ)
    return maybe_hot_count_p (e->count);
  return maybe_hot_frequency_p (EDGE_FREQUENCY (e));
}


/* Return true in case BB is probably never executed.  */

bool
probably_never_executed_bb_p (const_basic_block bb)
{
  if (profile_info && flag_branch_probabilities)
    return ((bb->count + profile_info->runs / 2) / profile_info->runs) == 0;
  if ((!profile_info || !flag_branch_probabilities)
      && (cgraph_get_node (current_function_decl)->frequency
          == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  return false;
}

/* Return true if NODE should be optimized for size.  */

bool
cgraph_optimize_for_size_p (struct cgraph_node *node)
{
  if (optimize_size)
    return true;
  if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
    return true;
  else
    return false;
}

/* Return true when function FUN should always be optimized for size.  */

bool
optimize_function_for_size_p (struct function *fun)
{
  if (optimize_size)
    return true;
  if (!fun || !fun->decl)
    return false;
  return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
}

/* Return true when function FUN should always be optimized for speed.  */

bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}

/* Return TRUE when BB should be optimized for size.  */

bool
optimize_bb_for_size_p (const_basic_block bb)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (bb);
}

/* Return TRUE when BB should be optimized for speed.  */

bool
optimize_bb_for_speed_p (const_basic_block bb)
{
  return !optimize_bb_for_size_p (bb);
}

/* Return TRUE when edge E should be optimized for size.  */

bool
optimize_edge_for_size_p (edge e)
{
  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
}

/* Return TRUE when edge E should be optimized for speed.  */

bool
optimize_edge_for_speed_p (edge e)
{
  return !optimize_edge_for_size_p (e);
}

/* Return TRUE when the insns currently being expanded should be
   optimized for size.  */

bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

/* Return TRUE when the insns currently being expanded should be
   optimized for speed.  */

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}

/* Return TRUE when LOOP should be optimized for size.  */

bool
optimize_loop_for_size_p (struct loop *loop)
{
  return optimize_bb_for_size_p (loop->header);
}

/* Return TRUE when LOOP should be optimized for speed.  */

bool
optimize_loop_for_speed_p (struct loop *loop)
{
  return optimize_bb_for_speed_p (loop->header);
}

/* Return TRUE when LOOP nest should be optimized for speed.  */

bool
optimize_loop_nest_for_speed_p (struct loop *loop)
{
  struct loop *l = loop;
  if (optimize_loop_for_speed_p (loop))
    return true;
  l = loop->inner;
  while (l && l != loop)
    {
      if (optimize_loop_for_speed_p (l))
        return true;
      if (l->inner)
        l = l->inner;
      else if (l->next)
        l = l->next;
      else
        {
          while (l != loop && !l->next)
            l = loop_outer (l);
          if (l != loop)
            l = l->next;
        }
    }
  return false;
}

/* Return TRUE when LOOP nest should be optimized for size.  */

bool
optimize_loop_nest_for_size_p (struct loop *loop)
{
  return !optimize_loop_nest_for_speed_p (loop);
}

/* Return true when edge E is likely to be well predictable by the
   branch predictor.  */

bool
predictable_edge_p (edge e)
{
  if (profile_status == PROFILE_ABSENT)
    return false;
  if ((e->probability
       <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100)
      || (REG_BR_PROB_BASE - e->probability
          <= PARAM_VALUE (PARAM_PREDICTABLE_BRANCH_OUTCOME) * REG_BR_PROB_BASE / 100))
    return true;
  return false;
}
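
/* Numerically (illustrative, assuming the params.def default of 2 for
   --param predictable-branch-outcome): an edge counts as predictable
   when its probability is at most 2% or at least 98%.  */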


/* Set RTL expansion for BB profile.  */

void
rtl_profile_for_bb (basic_block bb)
{
  crtl->maybe_hot_insn_p = maybe_hot_bb_p (bb);
}

/* Set RTL expansion for edge profile.  */

void
rtl_profile_for_edge (edge e)
{
  crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
}

/* Set RTL expansion to default mode (i.e. when profile info is not known).  */
void
default_rtl_profile (void)
{
  crtl->maybe_hot_insn_p = true;
}

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  rtx note;
  if (!INSN_P (BB_END (bb)))
    return false;
  for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED
        && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
      return true;
  return false;
}

/* This map contains for a basic block the list of predictions for the
   outgoing edges.  */

static struct pointer_map_t *bb_predictions;

/* Structure representing predictions at the tree level.  */

struct edge_prediction {
    struct edge_prediction *ep_next;
    edge ep_edge;
    enum br_predictor ep_predictor;
    int ep_probability;
};

/* Return true if one of the outgoing edges is already predicted by
   PREDICTOR.  */

bool
gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
{
  struct edge_prediction *i;
  void **preds = pointer_map_contains (bb_predictions, bb);

  if (!preds)
    return false;

  for (i = (struct edge_prediction *) *preds; i; i = i->ep_next)
    if (i->ep_predictor == predictor)
      return true;
  return false;
}

/* Return true when the probability of edge is reliable.

   The profile guessing code is good at predicting branch outcome (i.e.
   taken/not taken), which is predicted right slightly over 75% of the time.
   It is however notoriously poor at predicting the probability itself.
   In general the guessed profile appears a lot flatter (with probabilities
   closer to 50%) than reality, so it is a bad idea to use it to drive
   optimizations such as those disabling dynamic branch prediction for well
   predictable branches.

   There are two exceptions - edges leading to noreturn calls and edges
   predicted by the number of iterations heuristics are predicted well.
   This predicate should be able to distinguish those, but at the moment
   it simply checks for the noreturn heuristic, which is the only one
   giving a probability over 99% or below 1%.  In the future we might want
   to propagate reliability information across the CFG if we find this
   information useful in multiple places.  */
static bool
probability_reliable_p (int prob)
{
  return (profile_status == PROFILE_READ
          || (profile_status == PROFILE_GUESSED
              && (prob <= HITRATE (1) || prob >= HITRATE (99))));
}
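
/* Concretely (illustrative, with REG_BR_PROB_BASE of 10000): a guessed
   probability is considered reliable only when it is at most
   HITRATE (1) == 100 or at least HITRATE (99) == 9900.  */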

/* Same predicate as above, working on edges.  */
bool
edge_probability_reliable_p (const_edge e)
{
  return probability_reliable_p (e->probability);
}

/* Same predicate as edge_probability_reliable_p, working on notes.  */
bool
br_prob_note_reliable_p (const_rtx note)
{
  gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
  return probability_reliable_p (INTVAL (XEXP (note, 0)));
}

static void
predict_insn (rtx insn, enum br_predictor predictor, int probability)
{
  gcc_assert (any_condjump_p (insn));
  if (!flag_guess_branch_prob)
    return;

  add_reg_note (insn, REG_BR_PRED,
                gen_rtx_CONCAT (VOIDmode,
                                GEN_INT ((int) predictor),
                                GEN_INT ((int) probability)));
}

/* Predict insn by given predictor.  */

void
predict_insn_def (rtx insn, enum br_predictor predictor,
                  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_insn (insn, predictor, probability);
}

/* Predict edge E with given probability if possible.  */

void
rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  rtx last_insn;
  last_insn = BB_END (e->src);

  /* We can store the branch prediction information only about
     conditional jumps.  */
  if (!any_condjump_p (last_insn))
    return;

  /* We always store probability of branching.  */
  if (e->flags & EDGE_FALLTHRU)
    probability = REG_BR_PROB_BASE - probability;

  predict_insn (last_insn, predictor, probability);
}

/* Predict edge E with the given PROBABILITY.  */
void
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
{
  gcc_assert (profile_status != PROFILE_GUESSED);
  if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
      && flag_guess_branch_prob && optimize)
    {
      struct edge_prediction *i = XNEW (struct edge_prediction);
      void **preds = pointer_map_insert (bb_predictions, e->src);

      i->ep_next = (struct edge_prediction *) *preds;
      *preds = i;
      i->ep_probability = probability;
      i->ep_predictor = predictor;
      i->ep_edge = e;
    }
}

/* Remove all predictions on given basic block that are attached
   to edge E.  */
void
remove_predictions_associated_with_edge (edge e)
{
  void **preds;

  if (!bb_predictions)
    return;

  preds = pointer_map_contains (bb_predictions, e->src);

  if (preds)
    {
      struct edge_prediction **prediction = (struct edge_prediction **) preds;
      struct edge_prediction *next;

      while (*prediction)
        {
          if ((*prediction)->ep_edge == e)
            {
              next = (*prediction)->ep_next;
              free (*prediction);
              *prediction = next;
            }
          else
            prediction = &((*prediction)->ep_next);
        }
    }
}

/* Clears the list of predictions stored for BB.  */

static void
clear_bb_predictions (basic_block bb)
{
  void **preds = pointer_map_contains (bb_predictions, bb);
  struct edge_prediction *pred, *next;

  if (!preds)
    return;

  for (pred = (struct edge_prediction *) *preds; pred; pred = next)
    {
      next = pred->ep_next;
      free (pred);
    }
  *preds = NULL;
}

/* Return true when we can store prediction on insn INSN.
   At the moment we represent predictions only on conditional
   jumps, not on computed jumps or other complicated cases.  */
static bool
can_predict_insn_p (const_rtx insn)
{
  return (JUMP_P (insn)
          && any_condjump_p (insn)
          && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
}

/* Predict edge E by given predictor if possible.  */

void
predict_edge_def (edge e, enum br_predictor predictor,
                  enum prediction taken)
{
   int probability = predictor_info[(int) predictor].hitrate;

   if (taken != TAKEN)
     probability = REG_BR_PROB_BASE - probability;

   predict_edge (e, predictor, probability);
}

/* Invert all branch predictions or probability notes in the INSN.  This needs
   to be done each time we invert the condition used by the jump.  */

void
invert_br_probabilities (rtx insn)
{
  rtx note;

  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PROB)
      XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
    else if (REG_NOTE_KIND (note) == REG_BR_PRED)
      XEXP (XEXP (note, 0), 1)
        = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
}

/* Dump information about the branch prediction to the output file.  */

static void
dump_prediction (FILE *file, enum br_predictor predictor, int probability,
                 basic_block bb, int used)
{
  edge e;
  edge_iterator ei;

  if (!file)
    return;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & EDGE_FALLTHRU))
      break;

  fprintf (file, "  %s heuristics%s: %.1f%%",
           predictor_info[predictor].name,
           used ? "" : " (ignored)", probability * 100.0 / REG_BR_PROB_BASE);

  if (bb->count)
    {
      fprintf (file, "  exec ");
      fprintf (file, HOST_WIDEST_INT_PRINT_DEC, bb->count);
      if (e)
        {
          fprintf (file, " hit ");
          fprintf (file, HOST_WIDEST_INT_PRINT_DEC, e->count);
          fprintf (file, " (%.1f%%)", e->count * 100.0 / bb->count);
        }
    }

  fprintf (file, "\n");
}

/* We cannot predict the probabilities of outgoing edges of bb.  Set them
   evenly and hope for the best.  */
static void
set_even_probabilities (basic_block bb)
{
  int nedges = 0;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      nedges ++;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      e->probability = (REG_BR_PROB_BASE + nedges / 2) / nedges;
    else
      e->probability = 0;
}
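
/* For instance (illustrative, with REG_BR_PROB_BASE of 10000): a block
   with three ordinary successors gives each edge (10000 + 3 / 2) / 3,
   i.e. 3333; the "nedges / 2" term rounds the division to nearest.  */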

/* Combine all REG_BR_PRED notes into a single probability and attach a
   REG_BR_PROB note if not already present.  Remove now useless
   REG_BR_PRED notes.  */

static void
combine_predictions_for_insn (rtx insn, basic_block bb)
{
  rtx prob_note;
  rtx *pnote;
  rtx note;
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;

  if (!can_predict_insn_p (insn))
    {
      set_even_probabilities (bb);
      return;
    }

  prob_note = find_reg_note (insn, REG_BR_PROB, 0);
  pnote = &REG_NOTES (insn);
  if (dump_file)
    fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
             bb->index);

  /* We implement "first match" heuristics and use probability guessed
     by predictor with smallest index.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_BR_PRED)
      {
        enum br_predictor predictor = ((enum br_predictor)
                                       INTVAL (XEXP (XEXP (note, 0), 0)));
        int probability = INTVAL (XEXP (XEXP (note, 0), 1));

        found = true;
        if (best_predictor > predictor)
          best_probability = probability, best_predictor = predictor;

        d = (combined_probability * probability
             + (REG_BR_PROB_BASE - combined_probability)
             * (REG_BR_PROB_BASE - probability));

        /* Use FP math to avoid overflows of 32bit integers.  */
        if (d == 0)
          /* If one probability is 0% and one 100%, avoid division by zero.  */
          combined_probability = REG_BR_PROB_BASE / 2;
        else
          combined_probability = (((double) combined_probability) * probability
                                  * REG_BR_PROB_BASE / d + 0.5);
      }
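
  /* The update above implements Dempster-Shafer combination of the
     probability accumulated so far with the next prediction.  Worked
     example (illustrative): combining two independent 75% predictions
     gives 0.75 * 0.75 / (0.75 * 0.75 + 0.25 * 0.25) = 0.9, so agreeing
     heuristics reinforce each other, while a 75% and a 25% prediction
     cancel out to exactly 50%.  */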

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION,
                     combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
                       bb, !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
                       bb, first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  while (*pnote)
    {
      if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
        {
          enum br_predictor predictor = ((enum br_predictor)
                                         INTVAL (XEXP (XEXP (*pnote, 0), 0)));
          int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));

          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
          *pnote = XEXP (*pnote, 1);
        }
      else
        pnote = &XEXP (*pnote, 1);
    }

  if (!prob_note)
    {
      add_reg_note (insn, REG_BR_PROB, GEN_INT (combined_probability));

      /* Save the prediction into the CFG in case we are looking at a
         non-degenerate conditional jump.  */
      if (!single_succ_p (bb))
        {
          BRANCH_EDGE (bb)->probability = combined_probability;
          FALLTHRU_EDGE (bb)->probability
            = REG_BR_PROB_BASE - combined_probability;
        }
    }
  else if (!single_succ_p (bb))
    {
      int prob = INTVAL (XEXP (prob_note, 0));

      BRANCH_EDGE (bb)->probability = prob;
      FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
    }
  else
    single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
}

/* Combine predictions into single probability and store them into CFG.
   Remove now useless prediction entries.  */

static void
combine_predictions_for_bb (basic_block bb)
{
  int best_probability = PROB_EVEN;
  enum br_predictor best_predictor = END_PREDICTORS;
  int combined_probability = REG_BR_PROB_BASE / 2;
  int d;
  bool first_match = false;
  bool found = false;
  struct edge_prediction *pred;
  int nedges = 0;
  edge e, first = NULL, second = NULL;
  edge_iterator ei;
  void **preds;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & (EDGE_EH | EDGE_FAKE)))
      {
        nedges ++;
        if (first && !second)
          second = e;
        if (!first)
          first = e;
      }

  /* When there is no successor or only one choice, prediction is easy.

     We are lazy for now and predict only basic blocks with two outgoing
     edges.  It is possible to predict the generic case too, but we have to
     ignore first match heuristics and do more involved combining.  Implement
     this later.  */
  if (nedges != 2)
    {
      if (!bb->count)
        set_even_probabilities (bb);
      clear_bb_predictions (bb);
      if (dump_file)
        fprintf (dump_file, "%i edges in bb %i predicted to even probabilities\n",
                 nedges, bb->index);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "Predictions for bb %i\n", bb->index);

  preds = pointer_map_contains (bb_predictions, bb);
  if (preds)
    {
      /* We implement "first match" heuristics and use probability guessed
         by predictor with smallest index.  */
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
        {
          enum br_predictor predictor = pred->ep_predictor;
          int probability = pred->ep_probability;

          if (pred->ep_edge != first)
            probability = REG_BR_PROB_BASE - probability;

          found = true;
          /* First match heuristics would be wildly confused if we predicted
             both directions.  */
          if (best_predictor > predictor)
            {
              struct edge_prediction *pred2;
              int prob = probability;

              for (pred2 = (struct edge_prediction *) *preds; pred2; pred2 = pred2->ep_next)
               if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
                 {
                   int probability2 = pred2->ep_probability;

                   if (pred2->ep_edge != first)
                     probability2 = REG_BR_PROB_BASE - probability2;

                   if ((probability < REG_BR_PROB_BASE / 2) !=
                       (probability2 < REG_BR_PROB_BASE / 2))
                     break;

                   /* If the same predictor later gave a better result, go for it!  */
                   if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
                       || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
                     prob = probability2;
                 }
              if (!pred2)
                best_probability = prob, best_predictor = predictor;
            }

          d = (combined_probability * probability
               + (REG_BR_PROB_BASE - combined_probability)
               * (REG_BR_PROB_BASE - probability));

          /* Use FP math to avoid overflows of 32bit integers.  */
          if (d == 0)
            /* If one probability is 0% and one 100%, avoid division by zero.  */
            combined_probability = REG_BR_PROB_BASE / 2;
          else
            combined_probability = (((double) combined_probability)
                                    * probability
                                    * REG_BR_PROB_BASE / d + 0.5);
        }
    }

  /* Decide which heuristic to use.  In case we didn't match anything,
     use no_prediction heuristic, in case we did match, use either
     first match or Dempster-Shafer theory depending on the flags.  */

  if (predictor_info [best_predictor].flags & PRED_FLAG_FIRST_MATCH)
    first_match = true;

  if (!found)
    dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb, true);
  else
    {
      dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
                       !first_match);
      dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
                       first_match);
    }

  if (first_match)
    combined_probability = best_probability;
  dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb, true);

  if (preds)
    {
      for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
        {
          enum br_predictor predictor = pred->ep_predictor;
          int probability = pred->ep_probability;

          if (pred->ep_edge != EDGE_SUCC (bb, 0))
            probability = REG_BR_PROB_BASE - probability;
          dump_prediction (dump_file, predictor, probability, bb,
                           !first_match || best_predictor == predictor);
        }
    }
  clear_bb_predictions (bb);

  if (!bb->count)
    {
      first->probability = combined_probability;
      second->probability = REG_BR_PROB_BASE - combined_probability;
    }
}

/* Predict edge probabilities by exploiting loop structure.  */

static void
predict_loops (void)
{
  loop_iterator li;
  struct loop *loop;

  /* Try to predict out blocks in a loop that are not part of a
     natural loop.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      basic_block bb, *bbs;
      unsigned j, n_exits;
      VEC (edge, heap) *exits;
      struct tree_niter_desc niter_desc;
      edge ex;

      exits = get_loop_exit_edges (loop);
      n_exits = VEC_length (edge, exits);

      FOR_EACH_VEC_ELT (edge, exits, j, ex)
        {
          tree niter = NULL;
          HOST_WIDE_INT nitercst;
          int max = PARAM_VALUE (PARAM_MAX_PREDICTED_ITERATIONS);
          int probability;
          enum br_predictor predictor;

          if (number_of_iterations_exit (loop, ex, &niter_desc, false))
            niter = niter_desc.niter;
          if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
            niter = loop_niter_by_eval (loop, ex);

          if (TREE_CODE (niter) == INTEGER_CST)
            {
              if (host_integerp (niter, 1)
                  && compare_tree_int (niter, max-1) == -1)
                nitercst = tree_low_cst (niter, 1) + 1;
              else
                nitercst = max;
              predictor = PRED_LOOP_ITERATIONS;
            }
          /* If we have just one exit and we can derive some information about
             the number of iterations of the loop from the statements inside
             the loop, use it to predict this exit.  */
          else if (n_exits == 1)
            {
              nitercst = max_stmt_executions_int (loop, false);
              if (nitercst < 0)
                continue;
              if (nitercst > max)
                nitercst = max;

              predictor = PRED_LOOP_ITERATIONS_GUESSED;
            }
          else
            continue;

          probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
          predict_edge (ex, predictor, probability);
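
          /* Illustrative numbers: a loop expected to iterate 10 times gets
             an exit probability of (REG_BR_PROB_BASE + 5) / 10 per
             iteration, i.e. about 10%, so staying in the loop is predicted
             with probability roughly 90%.  */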
        }
      VEC_free (edge, heap, exits);

      bbs = get_loop_body (loop);

      for (j = 0; j < loop->num_nodes; j++)
        {
          int header_found = 0;
          edge e;
          edge_iterator ei;

          bb = bbs[j];

          /* Bypass loop heuristics on continue statement.  These
             statements construct loops via "non-loop" constructs
             in the source language and are better to be handled
             separately.  */
          if (predicted_by_p (bb, PRED_CONTINUE))
            continue;

          /* Loop branch heuristics - predict an edge back to a
             loop's head as taken.  */
          if (bb == loop->latch)
            {
              e = find_edge (loop->latch, loop->header);
              if (e)
                {
                  header_found = 1;
                  predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
                }
            }

          /* Loop exit heuristics - predict an edge exiting the loop if the
             conditional has no loop header successors as not taken.  */
          if (!header_found
              /* If we already used more reliable loop exit predictors, do not
                 bother with PRED_LOOP_EXIT.  */
              && !predicted_by_p (bb, PRED_LOOP_ITERATIONS_GUESSED)
              && !predicted_by_p (bb, PRED_LOOP_ITERATIONS))
            {
              /* For loops with many exits we don't want to predict all exits
                 with a pretty large probability, because if all exits are
                 considered in a row, the loop would be predicted to iterate
                 almost never.  The code to divide the probability by the
                 number of exits is very rough.  It should compute the number
                 of exits taken in each path through the function (not the
                 overall number of exits, which might be a lot higher for
                 loops with wide switch statements in them) and compute the
                 n-th root of it.

                 We limit the minimal probability to 2% to avoid
                 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
                 as this was causing a regression in the perl benchmark,
                 which contains such a wide loop.  */

              int probability = ((REG_BR_PROB_BASE
                                  - predictor_info [(int) PRED_LOOP_EXIT].hitrate)
                                 / n_exits);
              if (probability < HITRATE (2))
                probability = HITRATE (2);
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest->index < NUM_FIXED_BLOCKS
                    || !flow_bb_inside_loop_p (loop, e->dest))
                  predict_edge (e, PRED_LOOP_EXIT, probability);
            }
        }

      /* Free basic blocks from get_loop_body.  */
      free (bbs);
    }
}

/* Attempt to predict probabilities of BB outgoing edges using local
   properties.  */
static void
bb_estimate_probability_locally (basic_block bb)
{
  rtx last_insn = BB_END (bb);
  rtx cond;

  if (! can_predict_insn_p (last_insn))
    return;
  cond = get_condition (last_insn, NULL, false, false);
  if (! cond)
    return;

  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (COMPARISON_P (cond)
      && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
          || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
    {
      if (GET_CODE (cond) == EQ)
        predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
      else if (GET_CODE (cond) == NE)
        predict_insn_def (last_insn, PRED_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (GET_CODE (cond))
      {
      case CONST_INT:
        /* Unconditional branch.  */
        predict_insn_def (last_insn, PRED_UNCONDITIONAL,
                          cond == const0_rtx ? NOT_TAKEN : TAKEN);
        break;

      case EQ:
      case UNEQ:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE:
      case LTGT:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (XEXP (cond, 1) == const0_rtx
                 || XEXP (cond, 0) == const0_rtx)
          ;
        else
          predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
        break;

      case UNORDERED:
        predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
        break;

      case LE:
      case LT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE:
      case GT:
        if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
            || XEXP (cond, 1) == constm1_rtx)
          predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}

/* Set edge->probability for each successor edge of BB.  */
void
guess_outgoing_edge_probabilities (basic_block bb)
{
  bb_estimate_probability_locally (bb);
  combine_predictions_for_insn (BB_END (bb), bb);
}

static tree expr_expected_value (tree, bitmap);

/* Helper function for expr_expected_value.  */

static tree
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
                       tree op1, bitmap visited)
{
  gimple def;

  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
    {
      if (TREE_CONSTANT (op0))
        return op0;

      if (code != SSA_NAME)
        return NULL_TREE;

      def = SSA_NAME_DEF_STMT (op0);

      /* If we were already here, break the infinite cycle.  */
      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
        return NULL;

      if (gimple_code (def) == GIMPLE_PHI)
        {
          /* All the arguments of the PHI node must have the same expected
             constant value.  */
          int i, n = gimple_phi_num_args (def);
          tree val = NULL, new_val;

          for (i = 0; i < n; i++)
            {
              tree arg = PHI_ARG_DEF (def, i);

              /* If this PHI has itself as an argument, we cannot
                 determine the expected value of this argument.  However,
                 if we can find an expected constant value for the other
                 PHI args then we can still be sure that this is
                 likely a constant.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == PHI_RESULT (def))
                continue;

              new_val = expr_expected_value (arg, visited);
              if (!new_val)
                return NULL;
              if (!val)
                val = new_val;
              else if (!operand_equal_p (val, new_val, false))
                return NULL;
            }
          return val;
        }
      if (is_gimple_assign (def))
        {
          if (gimple_assign_lhs (def) != op0)
            return NULL;

          return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
                                        gimple_assign_rhs1 (def),
                                        gimple_assign_rhs_code (def),
                                        gimple_assign_rhs2 (def),
                                        visited);
        }

      if (is_gimple_call (def))
        {
          tree decl = gimple_call_fndecl (def);
          if (!decl)
            return NULL;
          if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
            switch (DECL_FUNCTION_CODE (decl))
              {
              case BUILT_IN_EXPECT:
                {
                  tree val;
                  if (gimple_call_num_args (def) != 2)
                    return NULL;
                  val = gimple_call_arg (def, 0);
                  if (TREE_CONSTANT (val))
                    return val;
                  return gimple_call_arg (def, 1);
                }
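
              /* Illustrative note: for user code such as

                   if (__builtin_expect (ptr != NULL, 1))
                     ...

                 the first argument is the tested expression and the second
                 the value the user expects it to have; the expected value
                 returned here feeds the PRED_BUILTIN_EXPECT predictor.  */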

              case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
              case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
              case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
              case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
              case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
              case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
              case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
                /* Assume that any given atomic operation has low contention,
                   and thus the compare-and-swap operation succeeds.  */
                return boolean_true_node;
            }
        }

      return NULL;
    }

  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited);
      if (!op0)
        return NULL;
      op1 = expr_expected_value (op1, visited);
      if (!op1)
        return NULL;
      res = fold_build2 (code, type, op0, op1);
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
    {
      tree res;
      op0 = expr_expected_value (op0, visited);
      if (!op0)
        return NULL;
      res = fold_build1 (code, type, op0);
      if (TREE_CONSTANT (res))
        return res;
      return NULL;
    }
  return NULL;
}

/* Return the constant EXPR is likely to have at execution time, or NULL
   if unknown.  The function is used by the builtin_expect branch predictor,
   so the evidence must come from this construct and additional possible
   constant folding.

   We may want to implement a more involved value guess (such as value range
   propagation based prediction), but such tricks should go into a new
   implementation.  */

static tree
expr_expected_value (tree expr, bitmap visited)
{
  enum tree_code code;
  tree op0, op1;

  if (TREE_CONSTANT (expr))
    return expr;

  extract_ops_from_tree (expr, &code, &op0, &op1);
  return expr_expected_value_1 (TREE_TYPE (expr),
                                op0, code, op1, visited);
}


/* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
   we no longer need.  */
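
/* For instance (illustrative), a statement

     x = __builtin_expect (e, c);

   becomes the plain assignment

     x = e;

   once its hint has been consumed by the predictors.  */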
static unsigned int
strip_predict_hints (void)
{
  basic_block bb;
  gimple ass_stmt;
  tree var;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator bi;
      for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
        {
          gimple stmt = gsi_stmt (bi);

          if (gimple_code (stmt) == GIMPLE_PREDICT)
            {
              gsi_remove (&bi, true);
              continue;
            }
          else if (gimple_code (stmt) == GIMPLE_CALL)
            {
              tree fndecl = gimple_call_fndecl (stmt);

              if (fndecl
                  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
                  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
                  && gimple_call_num_args (stmt) == 2)
                {
                  var = gimple_call_lhs (stmt);
                  if (var)
                    {
                      ass_stmt
                        = gimple_build_assign (var, gimple_call_arg (stmt, 0));
                      gsi_replace (&bi, ass_stmt, true);
                    }
                  else
                    {
                      gsi_remove (&bi, true);
                      continue;
                    }
                }
            }
          gsi_next (&bi);
        }
    }
  return 0;
}

/* Predict using opcode of the last statement in basic block.  */
static void
tree_predict_by_opcode (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  edge then_edge;
  tree op0, op1;
  tree type;
  tree val;
  enum tree_code cmp;
  bitmap visited;
  edge_iterator ei;

  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
    return;
  FOR_EACH_EDGE (then_edge, ei, bb->succs)
    if (then_edge->flags & EDGE_TRUE_VALUE)
      break;
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  cmp = gimple_cond_code (stmt);
  type = TREE_TYPE (op0);
  visited = BITMAP_ALLOC (NULL);
  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited);
  BITMAP_FREE (visited);
  if (val)
    {
      if (integer_zerop (val))
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, NOT_TAKEN);
      else
        predict_edge_def (then_edge, PRED_BUILTIN_EXPECT, TAKEN);
      return;
    }
  /* Try "pointer heuristic."
     A comparison ptr == 0 is predicted as false.
     Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
  if (POINTER_TYPE_P (type))
    {
      if (cmp == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
      else if (cmp == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
    }
  else

  /* Try "opcode heuristic."
     EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
    switch (cmp)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0) || integer_zerop (op1))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
        break;

      case NE_EXPR:
      case LTGT_EXPR:
        /* Floating point comparisons appear to behave in a very
           unpredictable way because of the special role of = tests in
           FP code.  */
        if (FLOAT_TYPE_P (type))
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
                 || integer_zerop (op1))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
        break;

      case ORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
        break;

      case UNORDERED_EXPR:
        predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
        break;

      case LE_EXPR:
      case LT_EXPR:
        if (integer_zerop (op1)
            || integer_onep (op1)
            || integer_all_onesp (op1)
            || real_zerop (op1)
            || real_onep (op1)
            || real_minus_onep (op1))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;

      case GE_EXPR:
      case GT_EXPR:
        if (integer_zerop (op1)
            || integer_onep (op1)
            || integer_all_onesp (op1)
            || real_zerop (op1)
            || real_onep (op1)
            || real_minus_onep (op1))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;

      default:
        break;
      }
}
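
/* Illustrative examples of the heuristics above: in "if (p == NULL)" the
   then edge is predicted not taken by the pointer heuristic; in
   "if (a != b)" on integers (neither operand a literal zero) it is
   predicted taken by the opcode heuristic; and in "if (n > 0)" it is
   predicted taken because most quantities are positive.  */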

/* Try to guess whether the return value VAL means an error code.  */

static enum br_predictor
return_prediction (tree val, enum prediction *prediction)
{
  /* VOID.  */
  if (!val)
    return PRED_NO_PREDICTION;
  /* Different heuristics for pointers and scalars.  */
  if (POINTER_TYPE_P (TREE_TYPE (val)))
    {
      /* NULL is usually not returned.  */
      if (integer_zerop (val))
        {
          *prediction = NOT_TAKEN;
          return PRED_NULL_RETURN;
        }
    }
  else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
    {
      /* Negative return values are often used to indicate
         errors.  */
      if (TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_sgn (val) < 0)
        {
          *prediction = NOT_TAKEN;
          return PRED_NEGATIVE_RETURN;
        }
      /* Constant return values seem to be commonly taken.
         Zero/one often represent booleans so exclude them from the
         heuristics.  */
      if (TREE_CONSTANT (val)
          && (!integer_zerop (val) && !integer_onep (val)))
        {
          *prediction = TAKEN;
          return PRED_CONST_RETURN;
        }
    }
  return PRED_NO_PREDICTION;
}
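
/* For example (illustrative): in a function ending with

     if (x < 0)
       return -1;
     return process (x);

   the "return -1" value matches PRED_NEGATIVE_RETURN, so the paths
   leading to it are predicted as unlikely by apply_return_prediction
   below.  */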

/* Find the basic block with the return expression and look for a possible
   return value, trying to apply the return_prediction heuristics.  */
static void
apply_return_prediction (void)
{
  gimple return_stmt = NULL;
  tree return_val;
  edge e;
  gimple phi;
  int phi_num_args, i;
  enum br_predictor pred;
  enum prediction direction;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      return_stmt = last_stmt (e->src);
      if (return_stmt
          && gimple_code (return_stmt) == GIMPLE_RETURN)
        break;
    }
  if (!e)
    return;
  return_val = gimple_return_retval (return_stmt);
  if (!return_val)
    return;
  if (TREE_CODE (return_val) != SSA_NAME
      || !SSA_NAME_DEF_STMT (return_val)
      || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
    return;
  phi = SSA_NAME_DEF_STMT (return_val);
  phi_num_args = gimple_phi_num_args (phi);
  pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);

  /* Avoid the degenerate case where all return values from the function
     belong to the same category (i.e. they are all positive constants),
     since then we can hardly say anything about them.  */
  for (i = 1; i < phi_num_args; i++)
    if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
      break;
  if (i != phi_num_args)
    for (i = 0; i < phi_num_args; i++)
      {
        pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
        if (pred != PRED_NO_PREDICTION)
          predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
                                         direction);
      }
}

/* Look for basic blocks that contain unlikely-to-happen events
   (such as noreturn calls) and mark all paths leading to execution
   of such basic blocks as unlikely.  */

static void
tree_bb_level_predictions (void)
{
  basic_block bb;
  bool has_return_edges = false;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
      {
        has_return_edges = true;
        break;
      }

  apply_return_prediction ();

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl;

          if (is_gimple_call (stmt))
            {
              if ((gimple_call_flags (stmt) & ECF_NORETURN)
                  && has_return_edges)
                predict_paths_leading_to (bb, PRED_NORETURN,
                                          NOT_TAKEN);
              decl = gimple_call_fndecl (stmt);
              if (decl
                  && lookup_attribute ("cold",
                                       DECL_ATTRIBUTES (decl)))
                predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
                                          NOT_TAKEN);
            }
          else if (gimple_code (stmt) == GIMPLE_PREDICT)
            {
              predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
                                        gimple_predict_outcome (stmt));
              /* Keep GIMPLE_PREDICT around so early inlining will propagate
                 hints to callers.  */
            }
        }
    }
}
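
/* Illustration (hypothetical code): a call such as

     extern void die (const char *) __attribute__ ((noreturn, cold));

     if (corrupt)
       die ("bad state");

   is caught by both heuristics above: ECF_NORETURN triggers
   PRED_NORETURN and the "cold" attribute triggers PRED_COLD_FUNCTION,
   so all paths leading to the call are predicted not taken.  */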

#ifdef ENABLE_CHECKING

/* Callback for pointer_map_traverse, asserts that the pointer map is
   empty.  */

static bool
assert_is_empty (const void *key ATTRIBUTE_UNUSED, void **value,
                 void *data ATTRIBUTE_UNUSED)
{
  gcc_assert (!*value);
  return false;
}
#endif

/* Predict branch probabilities and estimate profile for basic block BB.  */

static void
tree_estimate_probability_bb (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple last;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* Predict early returns to be probable, as we've already taken
         care of error returns and other cases are often used for
         fast paths through the function.

         Since we've already removed the return statements, we are
         looking for a CFG like:

           if (conditional)
             {
               ..
               goto return_block
             }
           some other blocks
         return_block:
           return_stmt.  */
      if (e->dest != bb->next_bb
          && e->dest != EXIT_BLOCK_PTR
          && single_succ_p (e->dest)
          && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
          && (last = last_stmt (e->dest)) != NULL
          && gimple_code (last) == GIMPLE_RETURN)
        {
          edge e1;
          edge_iterator ei1;

          if (single_succ_p (bb))
            {
              FOR_EACH_EDGE (e1, ei1, bb->preds)
                if (!predicted_by_p (e1->src, PRED_NULL_RETURN)
                    && !predicted_by_p (e1->src, PRED_CONST_RETURN)
                    && !predicted_by_p (e1->src, PRED_NEGATIVE_RETURN))
                  predict_edge_def (e1, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
            }
          else
            if (!predicted_by_p (e->src, PRED_NULL_RETURN)
                && !predicted_by_p (e->src, PRED_CONST_RETURN)
                && !predicted_by_p (e->src, PRED_NEGATIVE_RETURN))
              predict_edge_def (e, PRED_TREE_EARLY_RETURN, NOT_TAKEN);
        }

      /* Look for a block we are guarding (i.e. we dominate it,
         but it doesn't postdominate us).  */
      if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
          && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
          && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
        {
          gimple_stmt_iterator bi;

          /* The call heuristic claims that a guarded function call
             is improbable.  This is because such calls are often used
             to signal exceptional situations such as printing error
             messages.  */
          for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
               gsi_next (&bi))
            {
              gimple stmt = gsi_stmt (bi);
              if (is_gimple_call (stmt)
                  /* Constant and pure calls are hardly used to signal
                     something exceptional.  */
                  && gimple_has_side_effects (stmt))
                {
                  predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                  break;
                }
            }
        }
    }
  tree_predict_by_opcode (bb);
}
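
/* Illustration (hypothetical code) of the call heuristic above:

     if (fd < 0)
       perror ("open");

   The block containing the perror () call is dominated by the
   condition but does not postdominate it, and the call has side
   effects, so the edge into that block is predicted not taken
   (PRED_CALL).  */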

/* Predict branch probabilities and estimate profile of the tree CFG.
   This function can be called from the loop optimizers to recompute
   the profile information.  */

void
tree_estimate_probability (void)
{
  basic_block bb;

  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();
  /* We use loop_niter_by_eval, which requires that the loops have
     preheaders.  */
  create_preheaders (CP_SIMPLE_PREHEADERS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bb_predictions = pointer_map_create ();
  tree_bb_level_predictions ();
  record_loop_exits ();

  if (number_of_loops () > 1)
    predict_loops ();

  FOR_EACH_BB (bb)
    tree_estimate_probability_bb (bb);

  FOR_EACH_BB (bb)
    combine_predictions_for_bb (bb);

#ifdef ENABLE_CHECKING
  pointer_map_traverse (bb_predictions, assert_is_empty, NULL);
#endif
  pointer_map_destroy (bb_predictions);
  bb_predictions = NULL;

  estimate_bb_frequencies ();
  free_dominance_info (CDI_POST_DOMINATORS);
  remove_fake_exit_edges ();
}

/* Predict branch probabilities and estimate profile of the tree CFG.
   This is the driver function for PASS_PROFILE.  */

static unsigned int
tree_estimate_probability_driver (void)
{
  unsigned nb_loops;

  loop_optimizer_init (0);
  if (dump_file && (dump_flags & TDF_DETAILS))
    flow_loops_dump (dump_file, NULL, 0);

  mark_irreducible_loops ();

  nb_loops = number_of_loops ();
  if (nb_loops > 1)
    scev_initialize ();

  tree_estimate_probability ();

  if (nb_loops > 1)
    scev_finalize ();

  loop_optimizer_finalize ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags);
  if (profile_status == PROFILE_ABSENT)
    profile_status = PROFILE_GUESSED;
  return 0;
}

/* Predict edges to successors of CUR whose sources are not postdominated by
   BB by PRED and recurse to all postdominators.  */

static void
predict_paths_for_bb (basic_block cur, basic_block bb,
                      enum br_predictor pred,
                      enum prediction taken,
                      bitmap visited)
{
  edge e;
  edge_iterator ei;
  basic_block son;

  /* We are looking for all edges forming an edge cut induced by the
     set of all blocks postdominated by BB.  */
  FOR_EACH_EDGE (e, ei, cur->preds)
    if (e->src->index >= NUM_FIXED_BLOCKS
        && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
    {
      edge e2;
      edge_iterator ei2;
      bool found = false;

      /* Ignore fake edges and eh, we predict them as not taken anyway.  */
      if (e->flags & (EDGE_EH | EDGE_FAKE))
        continue;
      gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));

      /* See if there is an edge from e->src that is not abnormal
         and does not lead to BB.  */
      FOR_EACH_EDGE (e2, ei2, e->src->succs)
        if (e2 != e
            && !(e2->flags & (EDGE_EH | EDGE_FAKE))
            && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb))
          {
            found = true;
            break;
          }

      /* If there is a non-abnormal path leaving e->src, predict the edge
         using the predictor.  Otherwise we need to look for paths
         leading to e->src.

         The second case may lead to an infinite loop when we are predicting
         regions that are only reachable by abnormal edges.  We simply
         prevent visiting a given BB twice.  */
      if (found)
        predict_edge_def (e, pred, taken);
      else if (bitmap_set_bit (visited, e->src->index))
        predict_paths_for_bb (e->src, e->src, pred, taken, visited);
    }
  for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
       son;
       son = next_dom_son (CDI_POST_DOMINATORS, son))
    predict_paths_for_bb (son, bb, pred, taken, visited);
}

/* Predict edges on all paths leading to BB according to PRED and
   TAKEN.  */

static void
predict_paths_leading_to (basic_block bb, enum br_predictor pred,
                          enum prediction taken)
{
  bitmap visited = BITMAP_ALLOC (NULL);
  predict_paths_for_bb (bb, bb, pred, taken, visited);
  BITMAP_FREE (visited);
}

/* Like predict_paths_leading_to but takes an edge instead of a basic
   block.  */

static void
predict_paths_leading_to_edge (edge e, enum br_predictor pred,
                               enum prediction taken)
{
  bool has_nonloop_edge = false;
  edge_iterator ei;
  edge e2;

  basic_block bb = e->src;
  FOR_EACH_EDGE (e2, ei, bb->succs)
    if (e2->dest != e->src && e2->dest != e->dest
        && !(e->flags & (EDGE_EH | EDGE_FAKE))
        && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
      {
        has_nonloop_edge = true;
        break;
      }
  if (!has_nonloop_edge)
    {
      bitmap visited = BITMAP_ALLOC (NULL);
      predict_paths_for_bb (bb, bb, pred, taken, visited);
      BITMAP_FREE (visited);
    }
  else
    predict_edge_def (e, pred, taken);
}

/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

typedef struct block_info_def
{
  /* Estimated frequency of execution of basic_block.  */
  sreal frequency;

  /* To keep queue of basic blocks to process.  */
  basic_block next;

  /* Number of predecessors we need to visit first.  */
  int npredecessors;
} *block_info;

/* Similar information for edges.  */
typedef struct edge_info_def
{
  /* In case the edge is a loopback edge, the probability that the edge
     will be reached in case the header is.  The estimated number of
     iterations of the loop can then be computed as
     1 / (1 - back_edge_prob).  */
  sreal back_edge_prob;
  /* True if the edge is a loopback edge in the natural loop.  */
  unsigned int back_edge:1;
} *edge_info;

#define BLOCK_INFO(B)   ((block_info) (B)->aux)
#define EDGE_INFO(E)    ((edge_info) (E)->aux)
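
/* Worked example for the iteration estimate above: a latch edge with
   back_edge_prob == 0.9 implies roughly 1 / (1 - 0.9) == 10 iterations
   per entry of the loop; capping the probability at real_almost_one
   (1 - 1/REG_BR_PROB_BASE) keeps the estimate finite.  */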

/* Helper function for estimate_bb_frequencies.
   Propagate the frequencies in blocks marked in
   TOVISIT, starting in HEAD.  */

static void
propagate_freq (basic_block head, bitmap tovisit)
{
  basic_block bb;
  basic_block last;
  unsigned i;
  edge e;
  basic_block nextbb;
  bitmap_iterator bi;

  /* For each basic block we need to visit, count the number of its
     predecessors that we need to visit first.  */
  EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
    {
      edge_iterator ei;
      int count = 0;

      bb = BASIC_BLOCK (i);

      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          bool visit = bitmap_bit_p (tovisit, e->src->index);

          if (visit && !(e->flags & EDGE_DFS_BACK))
            count++;
          else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
            fprintf (dump_file,
                     "Irreducible region hit, ignoring edge to %i->%i\n",
                     e->src->index, bb->index);
        }
      BLOCK_INFO (bb)->npredecessors = count;
      /* When the function never returns, we will never process the exit
         block.  */
      if (!count && bb == EXIT_BLOCK_PTR)
        bb->count = bb->frequency = 0;
    }

  memcpy (&BLOCK_INFO (head)->frequency, &real_one, sizeof (real_one));
  last = head;
  for (bb = head; bb; bb = nextbb)
    {
      edge_iterator ei;
      sreal cyclic_probability, frequency;

      memcpy (&cyclic_probability, &real_zero, sizeof (real_zero));
      memcpy (&frequency, &real_zero, sizeof (real_zero));

      nextbb = BLOCK_INFO (bb)->next;
      BLOCK_INFO (bb)->next = NULL;

      /* Compute frequency of basic block.  */
      if (bb != head)
        {
#ifdef ENABLE_CHECKING
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
                        || (e->flags & EDGE_DFS_BACK));
#endif

          FOR_EACH_EDGE (e, ei, bb->preds)
            if (EDGE_INFO (e)->back_edge)
              {
                sreal_add (&cyclic_probability, &cyclic_probability,
                           &EDGE_INFO (e)->back_edge_prob);
              }
            else if (!(e->flags & EDGE_DFS_BACK))
              {
                sreal tmp;

                /*  frequency += (e->probability
                                  * BLOCK_INFO (e->src)->frequency /
                                  REG_BR_PROB_BASE);  */

                sreal_init (&tmp, e->probability, 0);
                sreal_mul (&tmp, &tmp, &BLOCK_INFO (e->src)->frequency);
                sreal_mul (&tmp, &tmp, &real_inv_br_prob_base);
                sreal_add (&frequency, &frequency, &tmp);
              }

          if (sreal_compare (&cyclic_probability, &real_zero) == 0)
            {
              memcpy (&BLOCK_INFO (bb)->frequency, &frequency,
                      sizeof (frequency));
            }
          else
            {
              if (sreal_compare (&cyclic_probability, &real_almost_one) > 0)
                {
                  memcpy (&cyclic_probability, &real_almost_one,
                          sizeof (real_almost_one));
                }

              /* BLOCK_INFO (bb)->frequency = frequency
                                              / (1 - cyclic_probability) */

              sreal_sub (&cyclic_probability, &real_one, &cyclic_probability);
              sreal_div (&BLOCK_INFO (bb)->frequency,
                         &frequency, &cyclic_probability);
            }
        }

      bitmap_clear_bit (tovisit, bb->index);

      e = find_edge (bb, head);
      if (e)
        {
          sreal tmp;

          /* EDGE_INFO (e)->back_edge_prob
             = ((e->probability * BLOCK_INFO (bb)->frequency)
                / REG_BR_PROB_BASE); */

          sreal_init (&tmp, e->probability, 0);
          sreal_mul (&tmp, &tmp, &BLOCK_INFO (bb)->frequency);
          sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                     &tmp, &real_inv_br_prob_base);
        }

      /* Propagate to successor blocks.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_DFS_BACK)
            && BLOCK_INFO (e->dest)->npredecessors)
          {
            BLOCK_INFO (e->dest)->npredecessors--;
            if (!BLOCK_INFO (e->dest)->npredecessors)
              {
                if (!nextbb)
                  nextbb = e->dest;
                else
                  BLOCK_INFO (last)->next = e->dest;

                last = e->dest;
              }
          }
    }
}
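
/* Worked example (illustrative): consider a loop header entered with
   frequency 1.0 whose latch edge has back_edge_prob == 0.75.  Then
   cyclic_probability == 0.75 and the header frequency becomes
   1.0 / (1 - 0.75) == 4.0, i.e. the body is expected to run four
   times per entry into the loop.  */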

/* Estimate probabilities of loopback edges in loops at the same nest
   level.  */

static void
estimate_loops_at_level (struct loop *first_loop)
{
  struct loop *loop;

  for (loop = first_loop; loop; loop = loop->next)
    {
      edge e;
      basic_block *bbs;
      unsigned i;
      bitmap tovisit = BITMAP_ALLOC (NULL);

      estimate_loops_at_level (loop->inner);

      /* Find current loop back edge and mark it.  */
      e = loop_latch_edge (loop);
      EDGE_INFO (e)->back_edge = 1;

      bbs = get_loop_body (loop);
      for (i = 0; i < loop->num_nodes; i++)
        bitmap_set_bit (tovisit, bbs[i]->index);
      free (bbs);
      propagate_freq (loop->header, tovisit);
      BITMAP_FREE (tovisit);
    }
}

/* Propagates frequencies through structure of loops.  */

static void
estimate_loops (void)
{
  bitmap tovisit = BITMAP_ALLOC (NULL);
  basic_block bb;

  /* Start by estimating the frequencies in the loops.  */
  if (number_of_loops () > 1)
    estimate_loops_at_level (current_loops->tree_root->inner);

  /* Now propagate the frequencies through all the blocks.  */
  FOR_ALL_BB (bb)
    {
      bitmap_set_bit (tovisit, bb->index);
    }
  propagate_freq (ENTRY_BLOCK_PTR, tovisit);
  BITMAP_FREE (tovisit);
}

/* Convert counts measured by profile driven feedback to frequencies.
   Return nonzero iff there was any nonzero execution count.  */

int
counts_to_freqs (void)
{
  gcov_type count_max, true_count_max = 0;
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    true_count_max = MAX (bb->count, true_count_max);

  count_max = MAX (true_count_max, 1);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;

  return true_count_max;
}
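
/* Worked example (illustrative, assuming BB_FREQ_MAX == 10000): a
   block executed 250 times in a function whose hottest block ran 1000
   times gets frequency (250 * 10000 + 500) / 1000 == 2500, i.e. counts
   are scaled into the 0..BB_FREQ_MAX range with rounding to nearest
   (the "+ count_max / 2" term).  */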

/* Return true if the function is likely to be expensive, so there is
   no point in optimizing performance of the prologue, epilogue or doing
   inlining at the expense of code size growth.  THRESHOLD is the limit
   of number of instructions the function can execute on average to
   still be considered not expensive.  */

bool
expensive_function_p (int threshold)
{
  unsigned int sum = 0;
  basic_block bb;
  unsigned int limit;

  /* We can not compute accurately for large thresholds due to scaled
     frequencies.  */
  gcc_assert (threshold <= BB_FREQ_MAX);

  /* Frequencies are out of range.  This either means that the function
     contains an internal loop executing more than BB_FREQ_MAX times or
     profile feedback is available and the function has not been executed
     at all.  */
  if (ENTRY_BLOCK_PTR->frequency == 0)
    return true;

  /* Maximally BB_FREQ_MAX^2 so overflow won't happen.  */
  limit = ENTRY_BLOCK_PTR->frequency * threshold;
  FOR_EACH_BB (bb)
    {
      rtx insn;

      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
           insn = NEXT_INSN (insn))
        if (active_insn_p (insn))
          {
            sum += bb->frequency;
            if (sum > limit)
              return true;
          }
    }

  return false;
}
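
/* Worked example (illustrative): with ENTRY_BLOCK_PTR->frequency ==
   1000 and THRESHOLD == 50, limit == 50000.  A loop body holding 10
   active insns at frequency 6000 contributes 10 * 6000 == 60000 >
   50000, so the function is reported as expensive.  */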

/* Estimate basic blocks frequency by given branch probabilities.  */

void
estimate_bb_frequencies (void)
{
  basic_block bb;
  sreal freq_max;

  if (profile_status != PROFILE_READ || !counts_to_freqs ())
    {
      static int real_values_initialized = 0;

      if (!real_values_initialized)
        {
          real_values_initialized = 1;
          sreal_init (&real_zero, 0, 0);
          sreal_init (&real_one, 1, 0);
          sreal_init (&real_br_prob_base, REG_BR_PROB_BASE, 0);
          sreal_init (&real_bb_freq_max, BB_FREQ_MAX, 0);
          sreal_init (&real_one_half, 1, -1);
          sreal_div (&real_inv_br_prob_base, &real_one, &real_br_prob_base);
          sreal_sub (&real_almost_one, &real_one, &real_inv_br_prob_base);
        }

      mark_dfs_back_edges ();

      single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;

      /* Set up block info for each basic block.  */
      alloc_aux_for_blocks (sizeof (struct block_info_def));
      alloc_aux_for_edges (sizeof (struct edge_info_def));
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          edge e;
          edge_iterator ei;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              sreal_init (&EDGE_INFO (e)->back_edge_prob, e->probability, 0);
              sreal_mul (&EDGE_INFO (e)->back_edge_prob,
                         &EDGE_INFO (e)->back_edge_prob,
                         &real_inv_br_prob_base);
            }
        }

      /* First compute probabilities locally for each loop from innermost
         to outermost to examine probabilities for back edges.  */
      estimate_loops ();

      memcpy (&freq_max, &real_zero, sizeof (real_zero));
      FOR_EACH_BB (bb)
        if (sreal_compare (&freq_max, &BLOCK_INFO (bb)->frequency) < 0)
          memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));

      sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
        {
          sreal tmp;

          sreal_mul (&tmp, &BLOCK_INFO (bb)->frequency, &freq_max);
          sreal_add (&tmp, &tmp, &real_one_half);
          bb->frequency = sreal_to_int (&tmp);
        }

      free_aux_for_blocks ();
      free_aux_for_edges ();
    }
  compute_function_frequency ();
}
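
/* Worked example (illustrative): if the hottest block's propagated
   frequency is 4.0 and BB_FREQ_MAX == 10000, every block frequency is
   scaled by 10000 / 4.0; adding real_one_half (0.5) before
   sreal_to_int rounds to nearest, so a block at 1.0 ends up with
   bb->frequency == 2500.  */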

/* Decide whether the function is hot, cold or unlikely executed.  */
void
compute_function_frequency (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_get_node (current_function_decl);
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      || MAIN_NAME_P (DECL_NAME (current_function_decl)))
    node->only_called_at_startup = true;
  if (DECL_STATIC_DESTRUCTOR (current_function_decl))
    node->only_called_at_exit = true;

  if (!profile_info || !flag_branch_probabilities)
    {
      int flags = flags_from_decl_or_type (current_function_decl);
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
          != NULL)
        node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
               != NULL)
        node->frequency = NODE_FREQUENCY_HOT;
      else if (flags & ECF_NORETURN)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
               || DECL_STATIC_DESTRUCTOR (current_function_decl))
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      return;
    }
  node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
  FOR_EACH_BB (bb)
    {
      if (maybe_hot_bb_p (bb))
        {
          node->frequency = NODE_FREQUENCY_HOT;
          return;
        }
      if (!probably_never_executed_bb_p (bb))
        node->frequency = NODE_FREQUENCY_NORMAL;
    }
}
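
/* Illustration (hypothetical code): without profile feedback,
   declaring

     void slow_path (void) __attribute__ ((cold));

   is enough for the attribute check above to classify the whole
   function as NODE_FREQUENCY_UNLIKELY_EXECUTED.  */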

static bool
gate_estimate_probability (void)
{
  return flag_guess_branch_prob;
}

/* Build PREDICT_EXPR.  */
tree
build_predict_expr (enum br_predictor predictor, enum prediction taken)
{
  tree t = build1 (PREDICT_EXPR, void_type_node,
                   build_int_cst (integer_type_node, predictor));
  SET_PREDICT_EXPR_OUTCOME (t, taken);
  return t;
}
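
/* Usage sketch (illustrative; the exact caller is an assumption): a
   front end or builtin folder can emit a hint such as

     tree hint = build_predict_expr (PRED_BUILTIN_EXPECT, TAKEN);

   which gimplification lowers to a GIMPLE_PREDICT statement, later
   consumed by tree_bb_level_predictions above.  */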

const char *
predictor_name (enum br_predictor predictor)
{
  return predictor_info[predictor].name;
}

struct gimple_opt_pass pass_profile =
{
 {
  GIMPLE_PASS,
  "profile_estimate",                   /* name */
  gate_estimate_probability,            /* gate */
  tree_estimate_probability_driver,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa    /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_strip_predict_hints =
{
 {
  GIMPLE_PASS,
  "*strip_predict_hints",               /* name */
  NULL,                                 /* gate */
  strip_predict_hints,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_BRANCH_PROB,                       /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa    /* todo_flags_finish */
 }
};

/* Rebuild function frequencies.  Passes are in general expected to
   maintain the profile by hand, however in some cases this is not
   possible: for example when inlining several functions with loops,
   frequencies might run out of scale and thus need to be recomputed.  */

void
rebuild_frequencies (void)
{
  timevar_push (TV_REBUILD_FREQUENCIES);
  if (profile_status == PROFILE_GUESSED)
    {
      loop_optimizer_init (0);
      add_noreturn_fake_exit_edges ();
      mark_irreducible_loops ();
      connect_infinite_loops_to_exit ();
      estimate_bb_frequencies ();
      remove_fake_exit_edges ();
      loop_optimizer_finalize ();
    }
  else if (profile_status == PROFILE_READ)
    counts_to_freqs ();
  else
    gcc_unreachable ();
  timevar_pop (TV_REBUILD_FREQUENCIES);
}
