/* IRA processing allocno lives to build allocno live ranges.
2
   Copyright (C) 2006, 2007, 2008, 2009, 2010
3
   Free Software Foundation, Inc.
4
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5
 
6
This file is part of GCC.
7
 
8
GCC is free software; you can redistribute it and/or modify it under
9
the terms of the GNU General Public License as published by the Free
10
Software Foundation; either version 3, or (at your option) any later
11
version.
12
 
13
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14
WARRANTY; without even the implied warranty of MERCHANTABILITY or
15
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16
for more details.
17
 
18
You should have received a copy of the GNU General Public License
19
along with GCC; see the file COPYING3.  If not see
20
<http://www.gnu.org/licenses/>.  */
21
 
22
#include "config.h"
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "regs.h"
27
#include "rtl.h"
28
#include "tm_p.h"
29
#include "target.h"
30
#include "flags.h"
31
#include "except.h"
32
#include "hard-reg-set.h"
33
#include "basic-block.h"
34
#include "insn-config.h"
35
#include "recog.h"
36
#include "diagnostic-core.h"
37
#include "params.h"
38
#include "df.h"
39
#include "sbitmap.h"
40
#include "sparseset.h"
41
#include "ira-int.h"
42
 
43
/* The code in this file is similar to that in global.c, but the code
44
   works on the allocno basis and creates live ranges instead of
45
   pseudo-register conflicts.  */
46
 
47
/* Program points are enumerated by numbers from range
48
   0..IRA_MAX_POINT-1.  There are approximately two times more program
49
   points than insns.  Program points are places in the program where
50
   liveness info can be changed.  In the most general case (there are more
51
   complicated cases too) some program points correspond to places
52
   where input operand dies and other ones correspond to places where
53
   output operands are born.  */
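/* For example, a simple insn such as "r3 = r1 + r2" typically gives rise
   to two program points: one associated with its output (where r3 is
   born) and one associated with its inputs (where r1 and r2 die).  This
   is why there are roughly twice as many points as insns.  */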
54
int ira_max_point;
55
 
56
/* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
57
   live ranges with given start/finish point.  */
58
live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;
59
 
60
/* Number of the current program point.  */
61
static int curr_point;
62
 
63
/* Point where register pressure excess started or -1 if there is no
64
   register pressure excess.  Excess pressure for a register class at
65
   some point means that there are more allocnos of the given register
66
   class living at the point than the number of hard-registers of the
67
   class available for the allocation.  It is defined only for
68
   pressure classes.  */
69
static int high_pressure_start_point[N_REG_CLASSES];
70
 
71
/* Objects live at current point in the scan.  */
72
static sparseset objects_live;
73
 
74
/* A temporary bitmap used in functions that wish to avoid visiting an allocno
75
   multiple times.  */
76
static sparseset allocnos_processed;
77
 
78
/* Set of hard regs (except eliminable ones) currently live.  */
79
static HARD_REG_SET hard_regs_live;
80
 
81
/* The loop tree node corresponding to the current basic block.  */
82
static ira_loop_tree_node_t curr_bb_node;
83
 
84
/* The number of the last processed call.  */
85
static int last_call_num;
86
/* The number of the last call at which a given allocno was saved.  */
87
static int *allocno_saved_at_call;
88
 
89
/* Record the birth of hard register REGNO, updating hard_regs_live and
90
   hard reg conflict information for living allocnos.  */
91
static void
92
make_hard_regno_born (int regno)
93
{
94
  unsigned int i;
95
 
96
  SET_HARD_REG_BIT (hard_regs_live, regno);
97
  EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
98
    {
99
      ira_object_t obj = ira_object_id_map[i];
100
 
101
      SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
102
      SET_HARD_REG_BIT (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), regno);
103
    }
104
}
105
 
106
/* Process the death of hard register REGNO.  This updates
107
   hard_regs_live.  */
108
static void
109
make_hard_regno_dead (int regno)
110
{
111
  CLEAR_HARD_REG_BIT (hard_regs_live, regno);
112
}
113
 
114
/* Record the birth of object OBJ.  Set a bit for it in objects_live,
115
   start a new live range for it if necessary and update hard register
116
   conflicts.  */
117
static void
118
make_object_born (ira_object_t obj)
119
{
120
  live_range_t lr = OBJECT_LIVE_RANGES (obj);
121
 
122
  sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
123
  IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), hard_regs_live);
124
  IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), hard_regs_live);
125
 
126
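  /* If the object's most recently started range already extends to this
     point or to the point just before it, do not start a new range;
     the existing one will simply be extended.  */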
  if (lr == NULL
127
      || (lr->finish != curr_point && lr->finish + 1 != curr_point))
128
    ira_add_live_range_to_object (obj, curr_point, -1);
129
}
130
 
131
/* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for the allocno
132
   associated with object OBJ.  */
133
static void
134
update_allocno_pressure_excess_length (ira_object_t obj)
135
{
136
  ira_allocno_t a = OBJECT_ALLOCNO (obj);
137
  int start, i;
138
  enum reg_class aclass, pclass, cl;
139
  live_range_t p;
140
 
141
  aclass = ALLOCNO_CLASS (a);
142
  pclass = ira_pressure_class_translate[aclass];
143
  for (i = 0;
144
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
145
       i++)
146
    {
147
      if (! ira_reg_pressure_class_p[cl])
148
        continue;
149
      if (high_pressure_start_point[cl] < 0)
150
        continue;
151
      p = OBJECT_LIVE_RANGES (obj);
152
      ira_assert (p != NULL);
153
      start = (high_pressure_start_point[cl] > p->start
154
               ? high_pressure_start_point[cl] : p->start);
155
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
156
    }
157
}
158
 
159
/* Process the death of object OBJ, which is associated with allocno
160
   A.  This finishes the current live range for it.  */
161
static void
162
make_object_dead (ira_object_t obj)
163
{
164
  live_range_t lr;
165
 
166
  sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (obj));
167
  lr = OBJECT_LIVE_RANGES (obj);
168
  ira_assert (lr != NULL);
169
  lr->finish = curr_point;
170
  update_allocno_pressure_excess_length (obj);
171
}
172
 
173
/* The current register pressures for each pressure class for the current
174
   basic block.  */
175
static int curr_reg_pressure[N_REG_CLASSES];
176
 
177
/* Record that register pressure for PCLASS increased by N registers.
178
   Update the current register pressure, maximal register pressure for
179
   the current BB and the start point of the register pressure
180
   excess.  */
181
static void
182
inc_register_pressure (enum reg_class pclass, int n)
183
{
184
  int i;
185
  enum reg_class cl;
186
 
187
  for (i = 0;
188
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
189
       i++)
190
    {
191
      if (! ira_reg_pressure_class_p[cl])
192
        continue;
193
      curr_reg_pressure[cl] += n;
194
      if (high_pressure_start_point[cl] < 0
195
          && (curr_reg_pressure[cl] > ira_available_class_regs[cl]))
196
        high_pressure_start_point[cl] = curr_point;
197
      if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
198
        curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
199
    }
200
}
201
 
202
/* Record that register pressure for PCLASS has decreased by NREGS
203
   registers; update current register pressure, start point of the
204
   register pressure excess, and register pressure excess length for
205
   living allocnos.  */
206
 
207
static void
208
dec_register_pressure (enum reg_class pclass, int nregs)
209
{
210
  int i;
211
  unsigned int j;
212
  enum reg_class cl;
213
  bool set_p = false;
214
 
215
  for (i = 0;
216
       (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
217
       i++)
218
    {
219
      if (! ira_reg_pressure_class_p[cl])
220
        continue;
221
      curr_reg_pressure[cl] -= nregs;
222
      ira_assert (curr_reg_pressure[cl] >= 0);
223
      if (high_pressure_start_point[cl] >= 0
224
          && curr_reg_pressure[cl] <= ira_available_class_regs[cl])
225
        set_p = true;
226
    }
227
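  /* If pressure for some pressure class dropped back to or below the
     number of available registers, the excess-pressure interval has
     ended: charge its length to the currently live allocnos and reset
     the interval's start point.  */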
  if (set_p)
228
    {
229
      EXECUTE_IF_SET_IN_SPARSESET (objects_live, j)
230
        update_allocno_pressure_excess_length (ira_object_id_map[j]);
231
      for (i = 0;
232
           (cl = ira_reg_class_super_classes[pclass][i]) != LIM_REG_CLASSES;
233
           i++)
234
        {
235
          if (! ira_reg_pressure_class_p[cl])
236
            continue;
237
          if (high_pressure_start_point[cl] >= 0
238
              && curr_reg_pressure[cl] <= ira_available_class_regs[cl])
239
            high_pressure_start_point[cl] = -1;
240
        }
241
    }
242
}
243
 
244
/* Mark the pseudo register REGNO as live.  Update all information about
245
   live ranges and register pressure.  */
246
static void
247
mark_pseudo_regno_live (int regno)
248
{
249
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
250
  enum reg_class pclass;
251
  int i, n, nregs;
252
 
253
  if (a == NULL)
254
    return;
255
 
256
  /* Invalidate because it is referenced.  */
257
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
258
 
259
  n = ALLOCNO_NUM_OBJECTS (a);
260
  pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
261
  nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
262
  if (n > 1)
263
    {
264
      /* We track every subobject separately.  */
265
      gcc_assert (nregs == n);
266
      nregs = 1;
267
    }
268
 
269
  for (i = 0; i < n; i++)
270
    {
271
      ira_object_t obj = ALLOCNO_OBJECT (a, i);
272
 
273
      if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
274
        continue;
275
 
276
      inc_register_pressure (pclass, nregs);
277
      make_object_born (obj);
278
    }
279
}
280
 
281
/* Like mark_pseudo_regno_live, but try to only mark one subword of
282
   the pseudo as live.  SUBWORD indicates which; a value of 0
283
   indicates the low part.  */
284
static void
285
mark_pseudo_regno_subword_live (int regno, int subword)
286
{
287
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
288
  int n;
289
  enum reg_class pclass;
290
  ira_object_t obj;
291
 
292
  if (a == NULL)
293
    return;
294
 
295
  /* Invalidate because it is referenced.  */
296
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
297
 
298
  n = ALLOCNO_NUM_OBJECTS (a);
299
  if (n == 1)
300
    {
301
      mark_pseudo_regno_live (regno);
302
      return;
303
    }
304
 
305
  pclass = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
306
  gcc_assert
307
    (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
308
  obj = ALLOCNO_OBJECT (a, subword);
309
 
310
  if (sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
311
    return;
312
 
313
  inc_register_pressure (pclass, 1);
314
  make_object_born (obj);
315
}
316
 
317
/* Mark the register REG as live.  Store a 1 in hard_regs_live for
318
   this register, and record how many consecutive hardware registers it
319
   actually needs.  */
320
static void
321
mark_hard_reg_live (rtx reg)
322
{
323
  int regno = REGNO (reg);
324
 
325
  if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
326
    {
327
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
328
      enum reg_class aclass, pclass;
329
 
330
      while (regno < last)
331
        {
332
          if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
333
              && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
334
            {
335
              aclass = ira_hard_regno_allocno_class[regno];
336
              pclass = ira_pressure_class_translate[aclass];
337
              inc_register_pressure (pclass, 1);
338
              make_hard_regno_born (regno);
339
            }
340
          regno++;
341
        }
342
    }
343
}
344
 
345
/* Mark a pseudo, or one of its subwords, as live.  REGNO is the pseudo's
346
   register number; ORIG_REG is the access in the insn, which may be a
347
   subreg.  */
348
static void
349
mark_pseudo_reg_live (rtx orig_reg, unsigned regno)
350
{
351
  if (df_read_modify_subreg_p (orig_reg))
352
    {
353
      mark_pseudo_regno_subword_live (regno,
354
                                      subreg_lowpart_p (orig_reg) ? 0 : 1);
355
    }
356
  else
357
    mark_pseudo_regno_live (regno);
358
}
359
 
360
/* Mark the register referenced by use or def REF as live.  */
361
static void
362
mark_ref_live (df_ref ref)
363
{
364
  rtx reg = DF_REF_REG (ref);
365
  rtx orig_reg = reg;
366
 
367
  if (GET_CODE (reg) == SUBREG)
368
    reg = SUBREG_REG (reg);
369
 
370
  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
371
    mark_pseudo_reg_live (orig_reg, REGNO (reg));
372
  else
373
    mark_hard_reg_live (reg);
374
}
375
 
376
/* Mark the pseudo register REGNO as dead.  Update all information about
377
   live ranges and register pressure.  */
378
static void
379
mark_pseudo_regno_dead (int regno)
380
{
381
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
382
  int n, i, nregs;
383
  enum reg_class cl;
384
 
385
  if (a == NULL)
386
    return;
387
 
388
  /* Invalidate because it is referenced.  */
389
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
390
 
391
  n = ALLOCNO_NUM_OBJECTS (a);
392
  cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
393
  nregs = ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)];
394
  if (n > 1)
395
    {
396
      /* We track every subobject separately.  */
397
      gcc_assert (nregs == n);
398
      nregs = 1;
399
    }
400
  for (i = 0; i < n; i++)
401
    {
402
      ira_object_t obj = ALLOCNO_OBJECT (a, i);
403
      if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
404
        continue;
405
 
406
      dec_register_pressure (cl, nregs);
407
      make_object_dead (obj);
408
    }
409
}
410
 
411
/* Like mark_pseudo_regno_dead, but called when we know that only part of the
412
   register dies.  SUBWORD indicates which; a value of 0 indicates the low part.  */
413
static void
414
mark_pseudo_regno_subword_dead (int regno, int subword)
415
{
416
  ira_allocno_t a = ira_curr_regno_allocno_map[regno];
417
  int n;
418
  enum reg_class cl;
419
  ira_object_t obj;
420
 
421
  if (a == NULL)
422
    return;
423
 
424
  /* Invalidate because it is referenced.  */
425
  allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
426
 
427
  n = ALLOCNO_NUM_OBJECTS (a);
428
  if (n == 1)
429
    /* The allocno as a whole doesn't die in this case.  */
430
    return;
431
 
432
  cl = ira_pressure_class_translate[ALLOCNO_CLASS (a)];
433
  gcc_assert
434
    (n == ira_reg_class_max_nregs[ALLOCNO_CLASS (a)][ALLOCNO_MODE (a)]);
435
 
436
  obj = ALLOCNO_OBJECT (a, subword);
437
  if (!sparseset_bit_p (objects_live, OBJECT_CONFLICT_ID (obj)))
438
    return;
439
 
440
  dec_register_pressure (cl, 1);
441
  make_object_dead (obj);
442
}
443
 
444
/* Mark the hard register REG as dead.  Store a 0 in hard_regs_live for the
445
   register.  */
446
static void
447
mark_hard_reg_dead (rtx reg)
448
{
449
  int regno = REGNO (reg);
450
 
451
  if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
452
    {
453
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
454
      enum reg_class aclass, pclass;
455
 
456
      while (regno < last)
457
        {
458
          if (TEST_HARD_REG_BIT (hard_regs_live, regno))
459
            {
460
              aclass = ira_hard_regno_allocno_class[regno];
461
              pclass = ira_pressure_class_translate[aclass];
462
              dec_register_pressure (pclass, 1);
463
              make_hard_regno_dead (regno);
464
            }
465
          regno++;
466
        }
467
    }
468
}
469
 
470
/* Mark a pseudo, or one of its subwords, as dead.  REGNO is the pseudo's
471
   register number; ORIG_REG is the access in the insn, which may be a
472
   subreg.  */
473
static void
474
mark_pseudo_reg_dead (rtx orig_reg, unsigned regno)
475
{
476
  if (df_read_modify_subreg_p (orig_reg))
477
    {
478
      mark_pseudo_regno_subword_dead (regno,
479
                                      subreg_lowpart_p (orig_reg) ? 0 : 1);
480
    }
481
  else
482
    mark_pseudo_regno_dead (regno);
483
}
484
 
485
/* Mark the register referenced by definition DEF as dead, if the
486
   definition is a total one.  */
487
static void
488
mark_ref_dead (df_ref def)
489
{
490
  rtx reg = DF_REF_REG (def);
491
  rtx orig_reg = reg;
492
 
493
  if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
494
    return;
495
 
496
  if (GET_CODE (reg) == SUBREG)
497
    reg = SUBREG_REG (reg);
498
 
499
  if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
500
      && (GET_CODE (orig_reg) != SUBREG
501
          || REGNO (reg) < FIRST_PSEUDO_REGISTER
502
          || !df_read_modify_subreg_p (orig_reg)))
503
    return;
504
 
505
  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
506
    mark_pseudo_reg_dead (orig_reg, REGNO (reg));
507
  else
508
    mark_hard_reg_dead (reg);
509
}
510
 
511
/* If REG is a pseudo or a subreg of it, and the class of its allocno
512
   intersects CL, make a conflict with pseudo DREG.  ORIG_DREG is the
513
   rtx actually accessed; it may be identical to DREG or a subreg of it.
514
   Advance the current program point before making the conflict if
515
   ADVANCE_P.  Return TRUE if we will need to advance the current
516
   program point.  */
517
static bool
518
make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, rtx orig_dreg,
519
                      bool advance_p)
520
{
521
  rtx orig_reg = reg;
522
  ira_allocno_t a;
523
 
524
  if (GET_CODE (reg) == SUBREG)
525
    reg = SUBREG_REG (reg);
526
 
527
  if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
528
    return advance_p;
529
 
530
  a = ira_curr_regno_allocno_map[REGNO (reg)];
531
  if (! reg_classes_intersect_p (cl, ALLOCNO_CLASS (a)))
532
    return advance_p;
533
 
534
  if (advance_p)
535
    curr_point++;
536
 
537
  mark_pseudo_reg_live (orig_reg, REGNO (reg));
538
  mark_pseudo_reg_live (orig_dreg, REGNO (dreg));
539
  mark_pseudo_reg_dead (orig_reg, REGNO (reg));
540
  mark_pseudo_reg_dead (orig_dreg, REGNO (dreg));
541
 
542
  return false;
543
}
544
 
545
/* Check for and, if necessary, make conflicts for pseudo DREG of class
546
   DEF_CL of the current insn with input operand USE of class USE_CL.
547
   ORIG_DREG is the rtx actually accessed; it may be identical to
548
   DREG or a subreg of it.  Advance the current program point before
549
   making the conflict if ADVANCE_P.  Return TRUE if we will need to
550
   advance the current program point.  */
551
static bool
552
check_and_make_def_use_conflict (rtx dreg, rtx orig_dreg,
553
                                 enum reg_class def_cl, int use,
554
                                 enum reg_class use_cl, bool advance_p)
555
{
556
  if (! reg_classes_intersect_p (def_cl, use_cl))
557
    return advance_p;
558
 
559
  advance_p = make_pseudo_conflict (recog_data.operand[use],
560
                                    use_cl, dreg, orig_dreg, advance_p);
561
 
562
  /* Reload may end up swapping commutative operands, so you
563
     have to take both orderings into account.  The
564
     constraints for the two operands can be completely
565
     different.  (Indeed, if the constraints for the two
566
     operands are the same for all alternatives, there's no
567
     point marking them as commutative.)  */
568
  if (use < recog_data.n_operands - 1
569
      && recog_data.constraints[use][0] == '%')
570
    advance_p
571
      = make_pseudo_conflict (recog_data.operand[use + 1],
572
                              use_cl, dreg, orig_dreg, advance_p);
573
  if (use >= 1
574
      && recog_data.constraints[use - 1][0] == '%')
575
    advance_p
576
      = make_pseudo_conflict (recog_data.operand[use - 1],
577
                              use_cl, dreg, orig_dreg, advance_p);
578
  return advance_p;
579
}
580
 
581
/* Check for and, if necessary, make conflicts for definition DEF of class
582
   DEF_CL of the current insn with input operands.  Process only
583
   constraints of alternative ALT.  */
584
static void
585
check_and_make_def_conflict (int alt, int def, enum reg_class def_cl)
586
{
587
  int use, use_match;
588
  ira_allocno_t a;
589
  enum reg_class use_cl, acl;
590
  bool advance_p;
591
  rtx dreg = recog_data.operand[def];
592
  rtx orig_dreg = dreg;
593
 
594
  if (def_cl == NO_REGS)
595
    return;
596
 
597
  if (GET_CODE (dreg) == SUBREG)
598
    dreg = SUBREG_REG (dreg);
599
 
600
  if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
601
    return;
602
 
603
  a = ira_curr_regno_allocno_map[REGNO (dreg)];
604
  acl = ALLOCNO_CLASS (a);
605
  if (! reg_classes_intersect_p (acl, def_cl))
606
    return;
607
 
608
  advance_p = true;
609
 
610
  for (use = 0; use < recog_data.n_operands; use++)
611
    {
612
      int alt1;
613
 
614
      if (use == def || recog_data.operand_type[use] == OP_OUT)
615
        continue;
616
 
617
      if (recog_op_alt[use][alt].anything_ok)
618
        use_cl = ALL_REGS;
619
      else
620
        use_cl = recog_op_alt[use][alt].cl;
621
 
622
      /* If there's any alternative that allows USE to match DEF, do not
623
         record a conflict.  If that causes us to create an invalid
624
         instruction due to the earlyclobber, reload must fix it up.  */
625
      for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++)
626
        if (recog_op_alt[use][alt1].matches == def
627
            || (use < recog_data.n_operands - 1
628
                && recog_data.constraints[use][0] == '%'
629
                && recog_op_alt[use + 1][alt1].matches == def)
630
            || (use >= 1
631
                && recog_data.constraints[use - 1][0] == '%'
632
                && recog_op_alt[use - 1][alt1].matches == def))
633
          break;
634
 
635
      if (alt1 < recog_data.n_alternatives)
636
        continue;
637
 
638
      advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
639
                                                   use, use_cl, advance_p);
640
 
641
      if ((use_match = recog_op_alt[use][alt].matches) >= 0)
642
        {
643
          if (use_match == def)
644
            continue;
645
 
646
          if (recog_op_alt[use_match][alt].anything_ok)
647
            use_cl = ALL_REGS;
648
          else
649
            use_cl = recog_op_alt[use_match][alt].cl;
650
          advance_p = check_and_make_def_use_conflict (dreg, orig_dreg, def_cl,
651
                                                       use, use_cl, advance_p);
652
        }
653
    }
654
}
655
 
656
/* Make conflicts of early clobber pseudo registers of the current
657
   insn with its inputs.  Avoid introducing unnecessary conflicts by
658
   checking classes of the constraints and pseudos because otherwise
659
   significant code degradation is possible for some targets.  */
660
static void
661
make_early_clobber_and_input_conflicts (void)
662
{
663
  int alt;
664
  int def, def_match;
665
  enum reg_class def_cl;
666
 
667
  for (alt = 0; alt < recog_data.n_alternatives; alt++)
668
    for (def = 0; def < recog_data.n_operands; def++)
669
      {
670
        def_cl = NO_REGS;
671
        if (recog_op_alt[def][alt].earlyclobber)
672
          {
673
            if (recog_op_alt[def][alt].anything_ok)
674
              def_cl = ALL_REGS;
675
            else
676
              def_cl = recog_op_alt[def][alt].cl;
677
            check_and_make_def_conflict (alt, def, def_cl);
678
          }
679
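        /* If this operand must match another operand and either of the
           two is an early clobber, also make conflicts using the matched
           operand's constraint class.  */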
        if ((def_match = recog_op_alt[def][alt].matches) >= 0
680
            && (recog_op_alt[def_match][alt].earlyclobber
681
                || recog_op_alt[def][alt].earlyclobber))
682
          {
683
            if (recog_op_alt[def_match][alt].anything_ok)
684
              def_cl = ALL_REGS;
685
            else
686
              def_cl = recog_op_alt[def_match][alt].cl;
687
            check_and_make_def_conflict (alt, def, def_cl);
688
          }
689
      }
690
}
691
 
692
/* Mark early clobber hard registers of the current INSN as live (if
693
   LIVE_P) or dead.  Return true if there are such registers.  */
694
static bool
695
mark_hard_reg_early_clobbers (rtx insn, bool live_p)
696
{
697
  df_ref *def_rec;
698
  bool set_p = false;
699
 
700
  for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
701
    if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
702
      {
703
        rtx dreg = DF_REF_REG (*def_rec);
704
 
705
        if (GET_CODE (dreg) == SUBREG)
706
          dreg = SUBREG_REG (dreg);
707
        if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
708
          continue;
709
 
710
        /* Hard register clobbers are believed to be early clobber
711
           because there is no way to say that non-operand hard
712
           register clobbers are not early ones.  */
713
        if (live_p)
714
          mark_ref_live (*def_rec);
715
        else
716
          mark_ref_dead (*def_rec);
717
        set_p = true;
718
      }
719
 
720
  return set_p;
721
}
722
 
723
/* Checks that CONSTRAINTS permits the use of only one hard register.  If
724
   it is so, the function returns the class of the hard register.
725
   Otherwise it returns NO_REGS.  */
726
static enum reg_class
727
single_reg_class (const char *constraints, rtx op, rtx equiv_const)
728
{
729
  int curr_alt, c;
730
  bool ignore_p;
731
  enum reg_class cl, next_cl;
732
 
733
  cl = NO_REGS;
734
  for (ignore_p = false, curr_alt = 0;
735
       (c = *constraints);
736
       constraints += CONSTRAINT_LEN (c, constraints))
737
    if (c == '#' || !recog_data.alternative_enabled_p[curr_alt])
738
      ignore_p = true;
739
    else if (c == ',')
740
      {
741
        curr_alt++;
742
        ignore_p = false;
743
      }
744
    else if (! ignore_p)
745
      switch (c)
746
        {
747
        case ' ':
748
        case '\t':
749
        case '=':
750
        case '+':
751
        case '*':
752
        case '&':
753
        case '%':
754
        case '!':
755
        case '?':
756
          break;
757
        case 'i':
758
          if (CONSTANT_P (op)
759
              || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
760
            return NO_REGS;
761
          break;
762
 
763
        case 'n':
764
          if (CONST_INT_P (op)
765
              || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
766
              || (equiv_const != NULL_RTX
767
                  && (CONST_INT_P (equiv_const)
768
                      || (GET_CODE (equiv_const) == CONST_DOUBLE
769
                          && GET_MODE (equiv_const) == VOIDmode))))
770
            return NO_REGS;
771
          break;
772
 
773
        case 's':
774
          if ((CONSTANT_P (op) && !CONST_INT_P (op)
775
               && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
776
              || (equiv_const != NULL_RTX
777
                  && CONSTANT_P (equiv_const)
778
                  && !CONST_INT_P (equiv_const)
779
                  && (GET_CODE (equiv_const) != CONST_DOUBLE
780
                      || GET_MODE (equiv_const) != VOIDmode)))
781
            return NO_REGS;
782
          break;
783
 
784
        case 'I':
785
        case 'J':
786
        case 'K':
787
        case 'L':
788
        case 'M':
789
        case 'N':
790
        case 'O':
791
        case 'P':
792
          if ((CONST_INT_P (op)
793
               && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
794
              || (equiv_const != NULL_RTX
795
                  && CONST_INT_P (equiv_const)
796
                  && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
797
                                                c, constraints)))
798
            return NO_REGS;
799
          break;
800
 
801
        case 'E':
802
        case 'F':
803
          if (GET_CODE (op) == CONST_DOUBLE
804
              || (GET_CODE (op) == CONST_VECTOR
805
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
806
              || (equiv_const != NULL_RTX
807
                  && (GET_CODE (equiv_const) == CONST_DOUBLE
808
                      || (GET_CODE (equiv_const) == CONST_VECTOR
809
                          && (GET_MODE_CLASS (GET_MODE (equiv_const))
810
                              == MODE_VECTOR_FLOAT)))))
811
            return NO_REGS;
812
          break;
813
 
814
        case 'G':
815
        case 'H':
816
          if ((GET_CODE (op) == CONST_DOUBLE
817
               && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
818
              || (equiv_const != NULL_RTX
819
                  && GET_CODE (equiv_const) == CONST_DOUBLE
820
                  && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
821
                                                       c, constraints)))
822
            return NO_REGS;
823
          /* ??? what about memory */
824
        case 'r':
825
        case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
826
        case 'h': case 'j': case 'k': case 'l':
827
        case 'q': case 't': case 'u':
828
        case 'v': case 'w': case 'x': case 'y': case 'z':
829
        case 'A': case 'B': case 'C': case 'D':
830
        case 'Q': case 'R': case 'S': case 'T': case 'U':
831
        case 'W': case 'Y': case 'Z':
832
          next_cl = (c == 'r'
833
                     ? GENERAL_REGS
834
                     : REG_CLASS_FROM_CONSTRAINT (c, constraints));
835
          if ((cl != NO_REGS && next_cl != cl)
836
              || (ira_available_class_regs[next_cl]
837
                  > ira_reg_class_max_nregs[next_cl][GET_MODE (op)]))
838
            return NO_REGS;
839
          cl = next_cl;
840
          break;
841
 
842
        case '0': case '1': case '2': case '3': case '4':
843
        case '5': case '6': case '7': case '8': case '9':
844
          next_cl
845
            = single_reg_class (recog_data.constraints[c - '0'],
846
                                recog_data.operand[c - '0'], NULL_RTX);
847
          if ((cl != NO_REGS && next_cl != cl)
848
              || next_cl == NO_REGS
849
              || (ira_available_class_regs[next_cl]
850
                  > ira_reg_class_max_nregs[next_cl][GET_MODE (op)]))
851
            return NO_REGS;
852
          cl = next_cl;
853
          break;
854
 
855
        default:
856
          return NO_REGS;
857
        }
858
  return cl;
859
}
860
 
861
/* The function checks that operand OP_NUM of the current insn can use
862
   only one hard register.  If it is so, the function returns the
863
   class of the hard register.  Otherwise it returns NO_REGS.  */
864
static enum reg_class
865
single_reg_operand_class (int op_num)
866
{
867
  if (op_num < 0 || recog_data.n_alternatives == 0)
868
    return NO_REGS;
869
  return single_reg_class (recog_data.constraints[op_num],
870
                           recog_data.operand[op_num], NULL_RTX);
871
}
872
 
873
/* The function sets up hard register set *SET to hard registers which
874
   might be used by insn reloads because the constraints are too
875
   strict.  */
876
void
877
ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set)
878
{
879
  int i, curr_alt, c, regno = 0;
880
  bool ignore_p;
881
  enum reg_class cl;
882
  rtx op;
883
  enum machine_mode mode;
884
 
885
  CLEAR_HARD_REG_SET (*set);
886
  for (i = 0; i < recog_data.n_operands; i++)
887
    {
888
      op = recog_data.operand[i];
889
 
890
      if (GET_CODE (op) == SUBREG)
891
        op = SUBREG_REG (op);
892
 
893
      if (GET_CODE (op) == SCRATCH
894
          || (REG_P (op) && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER))
895
        {
896
          const char *p = recog_data.constraints[i];
897
 
898
          mode = (GET_CODE (op) == SCRATCH
899
                  ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno));
900
          cl = NO_REGS;
901
          for (ignore_p = false, curr_alt = 0;
902
               (c = *p);
903
               p += CONSTRAINT_LEN (c, p))
904
            if (c == '#' || !recog_data.alternative_enabled_p[curr_alt])
905
              ignore_p = true;
906
            else if (c == ',')
907
              {
908
                curr_alt++;
909
                ignore_p = false;
910
              }
911
            else if (! ignore_p)
912
              switch (c)
913
                {
914
                case 'r':
915
                case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
916
                case 'h': case 'j': case 'k': case 'l':
917
                case 'q': case 't': case 'u':
918
                case 'v': case 'w': case 'x': case 'y': case 'z':
919
                case 'A': case 'B': case 'C': case 'D':
920
                case 'Q': case 'R': case 'S': case 'T': case 'U':
921
                case 'W': case 'Y': case 'Z':
922
                  cl = (c == 'r'
923
                        ? GENERAL_REGS
924
                        : REG_CLASS_FROM_CONSTRAINT (c, p));
925
                  if (cl != NO_REGS
926
                      /* There is no register pressure problem if all of the
927
                         regs in this class are fixed.  */
928
                      && ira_available_class_regs[cl] != 0
929
                      && (ira_available_class_regs[cl]
930
                          <= ira_reg_class_max_nregs[cl][mode]))
931
                    IOR_HARD_REG_SET (*set, reg_class_contents[cl]);
932
                  break;
933
                }
934
        }
935
    }
936
}
937
/* Process input operands (if IN_P) or output operands (otherwise) of
938
   the current insn with FREQ to find allocnos which can use only one
939
   hard register, and make the other currently live allocnos conflict
940
   with the hard register.  */
941
static void
942
process_single_reg_class_operands (bool in_p, int freq)
943
{
944
  int i, regno;
945
  unsigned int px;
946
  enum reg_class cl;
947
  rtx operand;
948
  ira_allocno_t operand_a, a;
949
 
950
  for (i = 0; i < recog_data.n_operands; i++)
951
    {
952
      operand = recog_data.operand[i];
953
      if (in_p && recog_data.operand_type[i] != OP_IN
954
          && recog_data.operand_type[i] != OP_INOUT)
955
        continue;
956
      if (! in_p && recog_data.operand_type[i] != OP_OUT
957
          && recog_data.operand_type[i] != OP_INOUT)
958
        continue;
959
      cl = single_reg_operand_class (i);
960
      if (cl == NO_REGS)
961
        continue;
962
 
963
      operand_a = NULL;
964
 
965
      if (GET_CODE (operand) == SUBREG)
966
        operand = SUBREG_REG (operand);
967
 
968
      if (REG_P (operand)
969
          && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
970
        {
971
          enum reg_class aclass;
972
 
973
          operand_a = ira_curr_regno_allocno_map[regno];
974
          aclass = ALLOCNO_CLASS (operand_a);
975
          if (ira_class_subset_p[cl][aclass]
976
              && ira_class_hard_regs_num[cl] != 0)
977
            {
978
              /* View the desired allocation of OPERAND as:
979
 
980
                    (REG:YMODE YREGNO),
981
 
982
                 a simplification of:
983
 
984
                    (subreg:YMODE (reg:XMODE XREGNO) OFFSET).  */
985
              enum machine_mode ymode, xmode;
986
              int xregno, yregno;
987
              HOST_WIDE_INT offset;
988
 
989
              xmode = recog_data.operand_mode[i];
990
              xregno = ira_class_hard_regs[cl][0];
991
              ymode = ALLOCNO_MODE (operand_a);
992
              offset = subreg_lowpart_offset (ymode, xmode);
993
              yregno = simplify_subreg_regno (xregno, xmode, offset, ymode);
994
              if (yregno >= 0
995
                  && ira_class_hard_reg_index[aclass][yregno] >= 0)
996
                {
997
                  int cost;
998
 
999
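                  /* The operand can only be placed in the sole allowed
                     hard register group of class CL; decrease the recorded
                     cost of the corresponding hard register for OPERAND_A
                     by the frequency-weighted register move cost, so the
                     allocator prefers that register.  */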
                  ira_allocate_and_set_costs
1000
                    (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a),
1001
                     aclass, 0);
1002
                  ira_init_register_move_cost_if_necessary (xmode);
1003
                  cost = freq * (in_p
1004
                                 ? ira_register_move_cost[xmode][aclass][cl]
1005
                                 : ira_register_move_cost[xmode][cl][aclass]);
1006
                  ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
1007
                    [ira_class_hard_reg_index[aclass][yregno]] -= cost;
1008
                }
1009
            }
1010
        }
1011
 
1012
      EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
1013
        {
1014
          ira_object_t obj = ira_object_id_map[px];
1015
          a = OBJECT_ALLOCNO (obj);
1016
          if (a != operand_a)
1017
            {
1018
              /* We could increase costs of A instead of making it
1019
                 conflict with the hard register.  But it works worse
1020
                 because it will be spilled in reload anyway.  */
1021
              IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
1022
                                reg_class_contents[cl]);
1023
              IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
1024
                                reg_class_contents[cl]);
1025
            }
1026
        }
1027
    }
1028
}
1029
 
1030
/* Return true when one of the predecessor edges of BB is marked with
1031
   EDGE_ABNORMAL_CALL or EDGE_EH.  */
1032
static bool
1033
bb_has_abnormal_call_pred (basic_block bb)
1034
{
1035
  edge e;
1036
  edge_iterator ei;
1037
 
1038
  FOR_EACH_EDGE (e, ei, bb->preds)
1039
    {
1040
      if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1041
        return true;
1042
    }
1043
  return false;
1044
}
1045
 
1046
/* Process insns of the basic block given by its LOOP_TREE_NODE to
1047
   update allocno live ranges, allocno hard register conflicts,
1048
   intersected calls, and register pressure info for allocnos for the
1049
   basic block and for the regions containing the basic block.  */
1050
static void
1051
process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
1052
{
1053
  int i, freq;
1054
  unsigned int j;
1055
  basic_block bb;
1056
  rtx insn;
1057
  bitmap_iterator bi;
1058
  bitmap reg_live_out;
1059
  unsigned int px;
1060
  bool set_p;
1061
 
1062
  bb = loop_tree_node->bb;
1063
  if (bb != NULL)
1064
    {
1065
      for (i = 0; i < ira_pressure_classes_num; i++)
1066
        {
1067
          curr_reg_pressure[ira_pressure_classes[i]] = 0;
1068
          high_pressure_start_point[ira_pressure_classes[i]] = -1;
1069
        }
1070
      curr_bb_node = loop_tree_node;
1071
      reg_live_out = DF_LR_OUT (bb);
1072
      sparseset_clear (objects_live);
1073
      REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
1074
      AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
1075
      AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
1076
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1077
        if (TEST_HARD_REG_BIT (hard_regs_live, i))
1078
          {
1079
            enum reg_class aclass, pclass, cl;
1080
 
1081
            aclass = ira_allocno_class_translate[REGNO_REG_CLASS (i)];
1082
            pclass = ira_pressure_class_translate[aclass];
1083
            for (j = 0;
1084
                 (cl = ira_reg_class_super_classes[pclass][j])
1085
                   != LIM_REG_CLASSES;
1086
                 j++)
1087
              {
1088
                if (! ira_reg_pressure_class_p[cl])
1089
                  continue;
1090
                curr_reg_pressure[cl]++;
1091
                if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
1092
                  curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
1093
                ira_assert (curr_reg_pressure[cl]
1094
                            <= ira_available_class_regs[cl]);
1095
              }
1096
          }
1097
      EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
1098
        mark_pseudo_regno_live (j);
1099
 
1100
      freq = REG_FREQ_FROM_BB (bb);
1101
      if (freq == 0)
1102
        freq = 1;
1103
 
1104
      /* Invalidate all allocno_saved_at_call entries.  */
1105
      last_call_num++;
1106
 
1107
      /* Scan the code of this basic block, noting which allocnos and
1108
         hard regs are born or die.
1109
 
1110
         Note that this loop treats uninitialized values as live until
1111
         the beginning of the block.  For example, if an instruction
1112
         uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
1113
         set, FOO will remain live until the beginning of the block.
1114
         Likewise if FOO is not set at all.  This is unnecessarily
1115
         pessimistic, but it probably doesn't matter much in practice.  */
1116
      FOR_BB_INSNS_REVERSE (bb, insn)
1117
        {
1118
          df_ref *def_rec, *use_rec;
1119
          bool call_p;
1120
 
1121
          if (!NONDEBUG_INSN_P (insn))
1122
            continue;
1123
 
1124
          if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1125
            fprintf (ira_dump_file, "   Insn %u(l%d): point = %d\n",
1126
                     INSN_UID (insn), loop_tree_node->parent->loop_num,
1127
                     curr_point);
1128
 
1129
          /* Mark each defined value as live.  We need to do this for
1130
             unused values because they still conflict with quantities
1131
             that are live at the time of the definition.
1132
 
1133
             Ignore DF_REF_MAY_CLOBBERs on a call instruction.  Such
1134
             references represent the effect of the called function
1135
             on a call-clobbered register.  Marking the register as
1136
             live would stop us from allocating it to a call-crossing
1137
             allocno.  */
1138
          call_p = CALL_P (insn);
1139
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
1140
            if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
1141
              mark_ref_live (*def_rec);
1142
 
1143
          /* If INSN has multiple outputs, then any value used in one
1144
             of the outputs conflicts with the other outputs.  Model this
1145
             by making the used value live during the output phase.
1146
 
1147
             It is unsafe to use !single_set here since it will ignore
1148
             an unused output.  Just because an output is unused does
1149
             not mean the compiler can assume the side effect will not
1150
             occur.  Consider if ALLOCNO appears in the address of an
1151
             output and we reload the output.  If we allocate ALLOCNO
1152
             to the same hard register as an unused output we could
1153
             set the hard register before the output reload insn.  */
1154
          if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1155
            for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
1156
              {
1157
                int i;
1158
                rtx reg;
1159
 
1160
                reg = DF_REF_REG (*use_rec);
1161
                for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
1162
                  {
1163
                    rtx set;
1164
 
1165
                    set = XVECEXP (PATTERN (insn), 0, i);
1166
                    if (GET_CODE (set) == SET
1167
                        && reg_overlap_mentioned_p (reg, SET_DEST (set)))
1168
                      {
1169
                        /* After the previous loop, this is a no-op if
1170
                           REG is contained within SET_DEST (SET).  */
1171
                        mark_ref_live (*use_rec);
1172
                        break;
1173
                      }
1174
                  }
1175
              }
1176
 
1177
          extract_insn (insn);
1178
          preprocess_constraints ();
1179
          process_single_reg_class_operands (false, freq);
1180
 
1181
          /* See which defined values die here.  */
1182
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
1183
            if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
1184
              mark_ref_dead (*def_rec);
1185
 
1186
          if (call_p)
1187
            {
1188
              last_call_num++;
1189
              sparseset_clear (allocnos_processed);
1190
              /* The current set of live allocnos are live across the call.  */
1191
              EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
1192
                {
1193
                  ira_object_t obj = ira_object_id_map[i];
1194
                  ira_allocno_t a = OBJECT_ALLOCNO (obj);
1195
                  int num = ALLOCNO_NUM (a);
1196
 
1197
                  /* Don't allocate allocnos that cross setjmps or any
1198
                     call, if this function receives a nonlocal
1199
                     goto.  */
1200
                  if (cfun->has_nonlocal_label
1201
                      || find_reg_note (insn, REG_SETJMP,
1202
                                        NULL_RTX) != NULL_RTX)
1203
                    {
1204
                      SET_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj));
1205
                      SET_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
1206
                    }
1207
                  if (can_throw_internal (insn))
1208
                    {
1209
                      IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
1210
                                        call_used_reg_set);
1211
                      IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
1212
                                        call_used_reg_set);
1213
                    }
1214
 
1215
                  if (sparseset_bit_p (allocnos_processed, num))
1216
                    continue;
1217
                  sparseset_set_bit (allocnos_processed, num);
1218
 
1219
                  if (allocno_saved_at_call[num] != last_call_num)
1220
                    /* Here we are mimicking caller-save.c behaviour
1221
                       which does not save a hard register at a call if
1222
                       it was saved on a previous call in the same basic
1223
                       block and the hard register was not mentioned
1224
                       between the two calls.  */
1225
                    ALLOCNO_CALL_FREQ (a) += freq;
1226
                  /* Mark it as saved at the next call.  */
1227
                  allocno_saved_at_call[num] = last_call_num + 1;
1228
                  ALLOCNO_CALLS_CROSSED_NUM (a)++;
1229
                }
1230
            }
1231
 
1232
          make_early_clobber_and_input_conflicts ();
1233
 
1234
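          /* Each insn generates two program points: the current one,
             used above for the insn's definitions, and the next one,
             used below for its uses.  */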
          curr_point++;
1235
 
1236
          /* Mark each used value as live.  */
1237
          for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
1238
            mark_ref_live (*use_rec);
1239
 
1240
          process_single_reg_class_operands (true, freq);
1241
 
1242
          set_p = mark_hard_reg_early_clobbers (insn, true);
1243
 
1244
          if (set_p)
1245
            {
1246
              mark_hard_reg_early_clobbers (insn, false);
1247
 
1248
              /* Mark each hard reg as live again.  For example, a
1249
                 hard register can be in clobber and in an insn
1250
                 input.  */
1251
              for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
1252
                {
1253
                  rtx ureg = DF_REF_REG (*use_rec);
1254
 
1255
                  if (GET_CODE (ureg) == SUBREG)
1256
                    ureg = SUBREG_REG (ureg);
1257
                  if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
1258
                    continue;
1259
 
1260
                  mark_ref_live (*use_rec);
1261
                }
1262
            }
1263
 
1264
          curr_point++;
1265
        }
1266
 
1267
#ifdef EH_RETURN_DATA_REGNO
1268
      if (bb_has_eh_pred (bb))
1269
        for (j = 0; ; ++j)
1270
          {
1271
            unsigned int regno = EH_RETURN_DATA_REGNO (j);
1272
            if (regno == INVALID_REGNUM)
1273
              break;
1274
            make_hard_regno_born (regno);
1275
          }
1276
#endif
1277
 
1278
      /* Allocnos can't go in stack regs at the start of a basic block
1279
         that is reached by an abnormal edge. Likewise for call
1280
         clobbered regs, because caller-save, fixup_abnormal_edges and
1281
         possibly the table driven EH machinery are not quite ready to
1282
         handle such allocnos live across such edges.  */
1283
      if (bb_has_abnormal_pred (bb))
1284
        {
1285
#ifdef STACK_REGS
1286
          EXECUTE_IF_SET_IN_SPARSESET (objects_live, px)
1287
            {
1288
              ira_allocno_t a = OBJECT_ALLOCNO (ira_object_id_map[px]);
1289
 
1290
              ALLOCNO_NO_STACK_REG_P (a) = true;
1291
              ALLOCNO_TOTAL_NO_STACK_REG_P (a) = true;
1292
            }
1293
          for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
1294
            make_hard_regno_born (px);
1295
#endif
1296
          /* No need to record conflicts for call clobbered regs if we
1297
             have nonlocal labels around, as we don't ever try to
1298
             allocate such regs in this case.  */
1299
          if (!cfun->has_nonlocal_label && bb_has_abnormal_call_pred (bb))
1300
            for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
1301
              if (call_used_regs[px])
1302
                make_hard_regno_born (px);
1303
        }
1304
 
1305
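      /* The insns were scanned in reverse, so every object still live
         here is live at the start of the block; close its current
         range at this point.  */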
      EXECUTE_IF_SET_IN_SPARSESET (objects_live, i)
1306
        make_object_dead (ira_object_id_map[i]);
1307
 
1308
      curr_point++;
1309
 
1310
    }
1311
  /* Propagate register pressure to upper loop tree nodes: */
1312
  if (loop_tree_node != ira_loop_tree_root)
1313
    for (i = 0; i < ira_pressure_classes_num; i++)
1314
      {
1315
        enum reg_class pclass;
1316
 
1317
        pclass = ira_pressure_classes[i];
1318
        if (loop_tree_node->reg_pressure[pclass]
1319
            > loop_tree_node->parent->reg_pressure[pclass])
1320
          loop_tree_node->parent->reg_pressure[pclass]
1321
            = loop_tree_node->reg_pressure[pclass];
1322
      }
1323
}
1324
 
1325
/* Create and set up IRA_START_POINT_RANGES and
1326
   IRA_FINISH_POINT_RANGES.  */
1327
static void
1328
create_start_finish_chains (void)
1329
{
1330
  ira_object_t obj;
1331
  ira_object_iterator oi;
1332
  live_range_t r;
1333
 
1334
  ira_start_point_ranges
1335
    = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
1336
  memset (ira_start_point_ranges, 0, ira_max_point * sizeof (live_range_t));
1337
  ira_finish_point_ranges
1338
    = (live_range_t *) ira_allocate (ira_max_point * sizeof (live_range_t));
1339
  memset (ira_finish_point_ranges, 0, ira_max_point * sizeof (live_range_t));
1340
  FOR_EACH_OBJECT (obj, oi)
1341
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1342
      {
1343
        r->start_next = ira_start_point_ranges[r->start];
1344
        ira_start_point_ranges[r->start] = r;
1345
        r->finish_next = ira_finish_point_ranges[r->finish];
1346
        ira_finish_point_ranges[r->finish] = r;
1347
      }
1348
}
1349
 
1350
/* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
1351
   new live ranges and program points were added as a result of new
1352
   insn generation.  */
1353
void
1354
ira_rebuild_start_finish_chains (void)
1355
{
1356
  ira_free (ira_finish_point_ranges);
1357
  ira_free (ira_start_point_ranges);
1358
  create_start_finish_chains ();
1359
}
1360
 
1361
/* Compress allocno live ranges by removing program points where
1362
   nothing happens.  */
1363
static void
1364
remove_some_program_points_and_update_live_ranges (void)
1365
{
1366
  unsigned i;
1367
  int n;
1368
  int *map;
1369
  ira_object_t obj;
1370
  ira_object_iterator oi;
1371
  live_range_t r;
1372
  sbitmap born_or_dead, born, dead;
1373
  sbitmap_iterator sbi;
1374
  bool born_p, dead_p, prev_born_p, prev_dead_p;
1375
 
1376
  born = sbitmap_alloc (ira_max_point);
1377
  dead = sbitmap_alloc (ira_max_point);
1378
  sbitmap_zero (born);
1379
  sbitmap_zero (dead);
1380
  FOR_EACH_OBJECT (obj, oi)
1381
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1382
      {
1383
        ira_assert (r->start <= r->finish);
1384
        SET_BIT (born, r->start);
1385
        SET_BIT (dead, r->finish);
1386
      }
1387
 
1388
  born_or_dead = sbitmap_alloc (ira_max_point);
1389
  sbitmap_a_or_b (born_or_dead, born, dead);
1390
  map = (int *) ira_allocate (sizeof (int) * ira_max_point);
1391
  n = -1;
1392
  prev_born_p = prev_dead_p = false;
1393
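  /* Renumber the points at which something is born or dies.  A point
     where only births occur and which immediately follows another
     births-only point (and likewise for deaths-only points) shares its
     predecessor's number; every other point gets a fresh number.  */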
  EXECUTE_IF_SET_IN_SBITMAP (born_or_dead, 0, i, sbi)
1394
    {
1395
      born_p = TEST_BIT (born, i);
1396
      dead_p = TEST_BIT (dead, i);
1397
      if ((prev_born_p && ! prev_dead_p && born_p && ! dead_p)
1398
          || (prev_dead_p && ! prev_born_p && dead_p && ! born_p))
1399
        map[i] = n;
1400
      else
1401
        map[i] = ++n;
1402
      prev_born_p = born_p;
1403
      prev_dead_p = dead_p;
1404
    }
1405
  sbitmap_free (born_or_dead);
1406
  sbitmap_free (born);
1407
  sbitmap_free (dead);
1408
  n++;
1409
  if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1410
    fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
1411
             ira_max_point, n, 100 * n / ira_max_point);
1412
  ira_max_point = n;
1413
 
1414
  FOR_EACH_OBJECT (obj, oi)
1415
    for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
1416
      {
1417
        r->start = map[r->start];
1418
        r->finish = map[r->finish];
1419
      }
1420
 
1421
  ira_free (map);
1422
}
1423
 
1424
/* Print live ranges R to file F.  */
1425
void
1426
ira_print_live_range_list (FILE *f, live_range_t r)
1427
{
1428
  for (; r != NULL; r = r->next)
1429
    fprintf (f, " [%d..%d]", r->start, r->finish);
1430
  fprintf (f, "\n");
1431
}
1432
 
1433
/* Print live ranges R to stderr.  */
1434
void
1435
ira_debug_live_range_list (live_range_t r)
1436
{
1437
  ira_print_live_range_list (stderr, r);
1438
}
1439
 
1440
/* Print live ranges of object OBJ to file F.  */
1441
static void
1442
print_object_live_ranges (FILE *f, ira_object_t obj)
1443
{
1444
  ira_print_live_range_list (f, OBJECT_LIVE_RANGES (obj));
1445
}
1446
 
1447
/* Print live ranges of allocno A to file F.  */
1448
static void
1449
print_allocno_live_ranges (FILE *f, ira_allocno_t a)
1450
{
1451
  int n = ALLOCNO_NUM_OBJECTS (a);
1452
  int i;
1453
 
1454
  for (i = 0; i < n; i++)
1455
    {
1456
      fprintf (f, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1457
      if (n > 1)
1458
        fprintf (f, " [%d]", i);
1459
      fprintf (f, "):");
1460
      print_object_live_ranges (f, ALLOCNO_OBJECT (a, i));
1461
    }
1462
}
1463
 
1464
/* Print live ranges of allocno A to stderr.  */
1465
void
1466
ira_debug_allocno_live_ranges (ira_allocno_t a)
1467
{
1468
  print_allocno_live_ranges (stderr, a);
1469
}
1470
 
1471
/* Print live ranges of all allocnos to file F.  */
1472
static void
1473
print_live_ranges (FILE *f)
1474
{
1475
  ira_allocno_t a;
1476
  ira_allocno_iterator ai;
1477
 
1478
  FOR_EACH_ALLOCNO (a, ai)
1479
    print_allocno_live_ranges (f, a);
1480
}
1481
 
1482
/* Print live ranges of all allocnos to stderr.  */
1483
void
1484
ira_debug_live_ranges (void)
1485
{
1486
  print_live_ranges (stderr);
1487
}
1488
 
1489
/* The main entry function creates live ranges, sets up
1490
   CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for objects, and
1491
   calculates register pressure info.  */
1492
void
1493
ira_create_allocno_live_ranges (void)
1494
{
1495
  objects_live = sparseset_alloc (ira_objects_num);
1496
  allocnos_processed = sparseset_alloc (ira_allocnos_num);
1497
  curr_point = 0;
1498
  last_call_num = 0;
1499
  allocno_saved_at_call
1500
    = (int *) ira_allocate (ira_allocnos_num * sizeof (int));
1501
  memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int));
1502
  ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
1503
                          process_bb_node_lives);
1504
  ira_max_point = curr_point;
1505
  create_start_finish_chains ();
1506
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1507
    print_live_ranges (ira_dump_file);
1508
  /* Clean up.  */
1509
  ira_free (allocno_saved_at_call);
1510
  sparseset_free (objects_live);
1511
  sparseset_free (allocnos_processed);
1512
}
1513
 
1514
/* Compress allocno live ranges.  */
1515
void
1516
ira_compress_allocno_live_ranges (void)
1517
{
1518
  remove_some_program_points_and_update_live_ranges ();
1519
  ira_rebuild_start_finish_chains ();
1520
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1521
    {
1522
      fprintf (ira_dump_file, "Ranges after the compression:\n");
1523
      print_live_ranges (ira_dump_file);
1524
    }
1525
}
1526
 
1527
/* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES.  */
1528
void
1529
ira_finish_allocno_live_ranges (void)
1530
{
1531
  ira_free (ira_finish_point_ranges);
1532
  ira_free (ira_start_point_ranges);
1533
}
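/* A sketch of the intended calling order of the entry points above,
   assumed from how the point-indexed chains are created, rebuilt and
   freed (not a verbatim caller):

       ira_create_allocno_live_ranges ();
       ...
       ira_compress_allocno_live_ranges ();
       ...
       ira_finish_allocno_live_ranges ();

   Creation builds the live ranges and the start/finish chains,
   compression renumbers program points and rebuilds the chains, and
   the final call releases the chains.  */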
