/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "hashtab.h"
#include "tree-pass.h"
#include "cselib.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
#include "bitmap.h"

/* A list of cselib_val structures.  */
struct elt_list {
    struct elt_list *next;
    cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static int discard_useless_locs (void **, void *);
static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, enum machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
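
/* Editorial sketch, not part of the original source: all three lookup
   paths are reached through cselib_lookup.  Assuming cselib has been
   initialized and the insns seen so far have been recorded, one might
   write:

     rtx reg = gen_rtx_REG (SImode, 1);
     rtx mem = gen_rtx_MEM (SImode, reg);
     cselib_val *vreg = cselib_lookup (reg, SImode, 0, VOIDmode);
     cselib_val *vmem = cselib_lookup (mem, SImode, 0, VOIDmode);

   VREG is found via the reg_values table, VMEM via the addr_list of
   the value of its address; any other rtx goes through
   cselib_hash_table.  */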

/* A table that enables us to look up elts by their value.  */
static htab_t cselib_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;
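
/* Editorial note, not part of the original source: the O(1) test the
   comment above alludes to is roughly

     if (val->locs && !val->locs->next
         && val->locs->setting_insn
         && DEBUG_INSN_P (val->locs->setting_insn))
       ... VAL is known only from debug insns ...

   which is the shape of the checks in promote_debug_loc and
   discard_useless_locs below.  */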

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
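
/* Editorial sketch, not part of the original source: looking up the
   value of register REGNO in mode MODE therefore means skipping the
   mode-defining first element when its ELT is NULL and scanning for a
   matching mode:

     struct elt_list *l = REG_VALUES (regno);
     if (l && l->elt == NULL)
       l = l->next;
     for (; l; l = l->next)
       if (GET_MODE (l->elt->val_rtx) == mode)
         ... found the value ...

   This is the idiom used by cselib_expand_value_rtx_1 below.  */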

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
                                 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)




/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  struct elt_list *el;
  el = (struct elt_list *) pool_alloc (elt_list_pool);
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring addr_list into the canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  pool_free (elt_list_pool, l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  pool_free (elt_loc_list_pool, l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  pool_free (cselib_val_pool, v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it was constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

static int
preserve_constants_and_equivs (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (!invariant_or_equiv_p (v))
    htab_clear_slot (cselib_hash_table, x);
  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    htab_traverse (cselib_hash_table, preserve_constants_and_equivs, NULL);
  else
    htab_empty (cselib_hash_table);

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* See the documentation of cselib_find_slot below.  */
static enum machine_mode find_slot_memmode;

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  While searching the
   table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
   in X can be resolved.  */

static void **
cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
                  enum machine_mode memmode)
{
  void **slot;
  find_slot_memmode = memmode;
  slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
  find_slot_memmode = VOIDmode;
  return slot;
}

/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (const void *entry, const void *x_arg)
{
  struct elt_loc_list *l;
  const cselib_val *const v = (const cselib_val *) entry;
  rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
  enum machine_mode mode = GET_MODE (x);

  gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
              && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));

  if (mode != GET_MODE (v->val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (CONST_INT_P (XEXP (x, 0))
          || GET_CODE (XEXP (x, 0)) == CONST_FIXED
          || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
      {
        promote_debug_loc (l);
        return 1;
      }

  return 0;
}

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

static hashval_t
get_value_hash (const void *entry)
{
  const cselib_val *const v = (const cselib_val *) entry;
  return v->hash;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
          (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      htab_clear_slot (cselib_hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (cselib_hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  htab_traverse (cselib_hash_table, discard_useless_values, 0);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

enum machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
  return rtx_equal_for_cselib_1 (x, y, VOIDmode);
}

/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = GEN_INT (-GET_MODE_SIZE (memmode));
      return XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = GEN_INT (GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}
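
/* Editorial example, not part of the original source: assuming a
   4-byte SImode access, autoinc_split maps

     (plus (reg) (const_int 12))       -> (reg), *OFF = (const_int 12)
     (pre_dec (reg)), memmode SImode   -> (reg), *OFF = (const_int -4)
     (post_inc (reg))                  -> (reg), *OFF left untouched

   so an autoinc address can be compared against the equivalent
   explicit PLUS form.  */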

/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

static int
rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode))
            return 1;
        }

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode))
            return 1;
        }

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
        return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
        return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        return rtx_equal_for_cselib_1 (x, y, memmode);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    case CONST_DOUBLE:
    case CONST_FIXED:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant; it is thus undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
            return 1;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
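
/* Editorial example, not part of the original source: the 'e' case
   above lets commutative operations compare equal under operand swap,
   so for registers r1 and r2,

     rtx a = gen_rtx_PLUS (SImode, r1, r2);
     rtx b = gen_rtx_PLUS (SImode, r2, r1);

   rtx_equal_for_cselib_p (a, b) should be nonzero even though
   rtx_equal_p (a, b) is false, matching the operand-order-insensitive
   hashing in cselib_hash_rtx below.  */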

/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */
static rtx
wrap_constant (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  gcc_assert (mode != VOIDmode);
  return gen_rtx_CONST (mode, x);
}
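
/* Editorial example, not part of the original source:

     wrap_constant (SImode, GEN_INT (5))

   yields (const:SI (const_int 5)), while a CONST_DOUBLE that already
   carries a mode is returned unchanged.  entry_and_rtx_equal_p above
   strips the wrapper again before comparing locations.  */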

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on the argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        i = -i;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned)code
        + cselib_hash_rtx (XEXP (x, 0), create, memmode)
        + cselib_hash_rtx (GEN_INT (i), create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
{
  cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx) pool_alloc (value_pool);
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  struct elt_loc_list *l;

  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode addr_mode;
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;

  addr = canonical_cselib_val (addr);
  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        promote_debug_loc (l->elt->locs);
        return l->elt;
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
                           INSERT, mode);
  *slot = mem_elt;
  return mem_elt;
}
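
/* Editorial note, not part of the original source: the result is a
   two-level structure.  For (mem:SI (reg)) the value recorded for the
   address register ends up with one addr_list entry per access mode,
   roughly

     VALUE(addr).addr_list -> [ VALUE(mem:SI), VALUE(mem:DI), ... ]

   so repeating the same MEM lookup in the same mode is a short list
   walk rather than a hash probe.  */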
/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }

    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer-equality point
   of view but from a tree-shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
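
/* Editorial usage sketch, not part of the original source, in the
   style of a DSE-like caller; REGS_ACTIVE must be empty going in and
   comes back empty:

     bitmap regs_active = BITMAP_ALLOC (NULL);
     rtx exp = cselib_expand_value_rtx (x, regs_active, 5);
     if (exp)
       ... EXP is X rewritten in terms of its roots ...
     BITMAP_FREE (regs_active);

   The max_depth argument (5 here) bounds the recursion, as the check
   at the top of cselib_expand_value_rtx_1 below shows.  */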

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  enum machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              unsigned regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is a requirement of dse, and if other potential uses
                 need this function we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
1579
              if (regno == STACK_POINTER_REGNUM
1580
                  || regno == FRAME_POINTER_REGNUM
1581
                  || regno == HARD_FRAME_POINTER_REGNUM
1582
                  || regno == cfa_base_preserved_regno)
1583
                return orig;
1584
 
1585
              bitmap_set_bit (evd->regs_active, regno);
1586
 
1587
              if (dump_file && (dump_flags & TDF_CSELIB))
1588
                fprintf (dump_file, "expanding: r%d into: ", regno);
1589
 
1590
              result = expand_loc (l->elt->locs, evd, max_depth);
1591
              bitmap_clear_bit (evd->regs_active, regno);
1592
 
1593
              if (result)
1594
                return result;
1595
              else
1596
                return orig;
1597
            }
1598
      }

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each instance represents a
         distinct value.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

    case SUBREG:
      {
        rtx subreg;

        if (evd->callback)
          {
            subreg = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);
            if (subreg != orig)
              return subreg;
          }

        subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
                                            max_depth - 1);
        if (!subreg)
          return NULL;
        scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
                                     GET_MODE (SUBREG_REG (orig)),
                                     SUBREG_BYTE (orig));
        if (scopy == NULL
            || (GET_CODE (scopy) == SUBREG
                && !REG_P (SUBREG_REG (scopy))
                && !MEM_P (SUBREG_REG (scopy))))
          return NULL;

        return scopy;
      }

    case VALUE:
      {
        rtx result;

        if (dump_file && (dump_flags & TDF_CSELIB))
          {
            fputs ("\nexpanding ", dump_file);
            print_rtl_single (dump_file, orig);
            fputs (" into...", dump_file);
          }

        if (evd->callback)
          {
            result = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);

            if (result != orig)
              return result;
          }

        result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
        return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
        return evd->callback (orig, evd->regs_active, max_depth,
                              evd->callback_arg);
      return orig;

    default:
      break;
    }
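
  /* In dummy mode (evd->dummy), no replacement rtl is built; the walk
     below only checks whether the expansion would succeed.  */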
  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          {
            rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
                                                    max_depth - 1);
            if (!result)
              return NULL;
            if (copy)
              XEXP (copy, i) = result;
          }
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            if (copy)
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (orig, i); j++)
              {
                rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
                                                        evd, max_depth - 1);
                if (!result)
                  return NULL;
                if (copy)
                  XVECEXP (copy, i, j) = result;
              }
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and whose mode isn't derivable from the whole rtx's
     mode, try simplify_*_operation first with the mode taken from the
     original's operand, and fall back to simplify_rtx below.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
                                            GET_MODE (XEXP (orig, 0)));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_ternary_operation (code, mode,
                                              GET_MODE (XEXP (orig, 0)),
                                              XEXP (copy, 0), XEXP (copy, 1),
                                              XEXP (copy, 2));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (copy, 1)) == VOIDmode
          && (GET_MODE (XEXP (orig, 0)) != VOIDmode
              || GET_MODE (XEXP (orig, 1)) != VOIDmode))
        {
          scopy = simplify_relational_operation (code, mode,
                                                 (GET_MODE (XEXP (orig, 0))
                                                  != VOIDmode)
                                                 ? GET_MODE (XEXP (orig, 0))
                                                 : GET_MODE (XEXP (orig, 1)),
                                                 XEXP (copy, 0),
                                                 XEXP (copy, 1));
          if (scopy)
            return scopy;
        }
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, enum machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
          return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
         properly now.  Remove the if stmt for the next release.  */
      if (! e)
        {
          /* Assign a value that doesn't match any other.  */
          e = new_cselib_val (next_uid, GET_MODE (x), x);
        }
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
        break;
      return e->val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
    case CONST_FIXED:
      return x;
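
    /* An auto-increment or auto-decrement address is equivalent to the
       base address adjusted by the size of the access; e.g. with a
       4-byte MEMMODE, (pre_dec (reg X)) substitutes like
       (plus (reg X) (const_int -4)).  */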
    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        i = -i;
      return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
                                     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

          if (t != XEXP (x, i))
            {
              if (x == copy)
                copy = shallow_copy_rtx (x);
              XEXP (copy, i) = t;
            }
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

              if (t != XVECEXP (x, i, j))
                {
                  if (XVEC (x, i) == XVEC (copy, i))
                    {
                      if (x == copy)
                        copy = shallow_copy_rtx (x);
                      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
                    }
                  XVECEXP (copy, i, j) = t;
                }
            }
        }
    }

  return copy;
}

/* Wrapper for cselib_subst_to_values that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, enum machine_mode mode,
                 int create, enum machine_mode memmode)
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (mode == GET_MODE (l->elt->val_rtx))
          {
            promote_debug_loc (l->elt->locs);
            return l->elt;
          }

      if (! create)
        return 0;
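
      /* Track the widest span of hard registers covered by any value;
         cselib_invalidate_regno uses max_value_regs to bound how far
         below a register it must scan for overlapping values.  */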
      if (i < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs[i][mode];

          if (n > max_value_regs)
            max_value_regs = n;
        }

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);
      if (REG_VALUES (i) == 0)
        {
          /* Maintain the invariant that the first entry of
             REG_VALUES, if present, must be the value used to set the
             register, or NULL.  */
          used_regs[n_used_regs++] = i;
          REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
        }
      else if (cselib_preserve_constants
               && GET_MODE_CLASS (mode) == MODE_INT)
        {
          /* During var-tracking, try harder to find equivalences
             for SUBREGs.  If a setter sets say a DImode register
             and the user uses that register only in SImode, add a
             lowpart subreg location.  */
          struct elt_list *lwider = NULL;
          l = REG_VALUES (i);
          if (l && l->elt == NULL)
            l = l->next;
          for (; l; l = l->next)
            if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
                && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
                   > GET_MODE_SIZE (mode)
                && (lwider == NULL
                    || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
                       < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
              {
                struct elt_loc_list *el;
                if (i < FIRST_PSEUDO_REGISTER
                    && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
                  continue;
                for (el = l->elt->locs; el; el = el->next)
                  if (!REG_P (el->loc))
                    break;
                if (el)
                  lwider = l;
              }
          if (lwider)
            {
              rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
                                        GET_MODE (lwider->elt->val_rtx));
              if (sub)
                new_elt_loc_list (e, sub);
            }
        }
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (wrap_constant (mode, x), hashval,
                           create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
  return e;
}

/* Wrapper for cselib_lookup that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, enum machine_mode mode,
                         int create, enum machine_mode memmode, rtx insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1 that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, enum machine_mode mode,
               int create, enum machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest by setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
               ret ? ret->uid : 0,
               ret ? ret->hash : 0);
    }

  return ret;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
              || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
        i = 0;
      else
        i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
         we're invalidating, remove the value.  */
      while (*l)
        {
          cselib_val *v = (*l)->elt;
          bool had_locs;
          rtx setting_insn;
          struct elt_loc_list **p;
          unsigned int this_last = i;

          if (i < FIRST_PSEUDO_REGISTER && v != NULL)
            this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

          if (this_last < regno || v == NULL
              || (v == cfa_base_preserved_val
                  && i == cfa_base_preserved_regno))
            {
              l = &(*l)->next;
              continue;
            }

          /* We have an overlap.  */
          if (*l == REG_VALUES (i))
            {
              /* Maintain the invariant that the first entry of
                 REG_VALUES, if present, must be the value used to set
                 the register, or NULL.  This is also nice because
                 then we won't push the same regno onto user_regs
                 multiple times.  */
              (*l)->elt = NULL;
              l = &(*l)->next;
            }
          else
            unchain_one_elt_list (l);

          v = canonical_cselib_val (v);

          had_locs = v->locs != NULL;
          setting_insn = v->locs ? v->locs->setting_insn : NULL;

          /* Now, we clear the mapping from value to reg.  It must exist, so
             this code will crash intentionally if it doesn't.  */
          for (p = &v->locs; ; p = &(*p)->next)
            {
              rtx x = (*p)->loc;

              if (REG_P (x) && REGNO (x) == i)
                {
                  unchain_one_elt_loc_list (p);
                  break;
                }
            }

          if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
            {
              if (setting_insn && DEBUG_INSN_P (setting_insn))
                n_useless_debug_values++;
              else
                n_useless_values++;
            }
        }
    }
}

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
        {
          rtx x = (*p)->loc;
          cselib_val *addr;
          struct elt_list **mem_chain;

          /* MEMs may occur in locations only at the top level; below
             that every MEM or REG is substituted by its VALUE.  */
          if (!MEM_P (x))
            {
              p = &(*p)->next;
              continue;
            }
          if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
              && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
                                          mem_addr, x, NULL_RTX))
            {
              has_mem = true;
              num_mems++;
              p = &(*p)->next;
              continue;
            }

          /* This one overlaps.  */
          /* We must have a mapping from this MEM's address to the
             value (E).  Remove that, too.  */
          addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
          addr = canonical_cselib_val (addr);
          gcc_checking_assert (v == canonical_cselib_val (v));
          mem_chain = &addr->addr_list;
          for (;;)
            {
              cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

              if (canon == v)
                {
                  unchain_one_elt_list (mem_chain);
                  break;
                }

              /* Record canonicalized elt.  */
              (*mem_chain)->elt = canon;

              mem_chain = &(*mem_chain)->next;
            }

          unchain_one_elt_loc_list (p);
        }

      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
        {
          if (setting_insn && DEBUG_INSN_P (setting_insn))
            n_useless_debug_values++;
          else
            n_useless_values++;
        }
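
      /* Keep V on the first_containing_mem chain only if it still has
         at least one MEM location; otherwise drop it from the chain.  */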
      next = v->next_containing_mem;
      if (has_mem)
        {
          *vp = v;
          vp = &(*vp)->next_containing_mem;
        }
      else
        v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}

/* Invalidate DEST, which is being assigned to or clobbered.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
                                   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (dreg < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

          if (n > max_value_regs)
            max_value_regs = n;
        }

      if (REG_VALUES (dreg) == 0)
        {
          used_regs[n_used_regs++] = dreg;
          REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
        }
      else
        {
          /* The register should have been invalidated.  */
          gcc_assert (REG_VALUES (dreg)->elt == 0);
          REG_VALUES (dreg)->elt = src_elt;
        }

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
        n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
           && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
        n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
{
  cselib_val *nelt;
  rtx save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      if (!PRESERVED_VALUE_P (nelt->val_rtx))
        cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};

/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF (if SRCOFF is non-NULL) is stored in
   DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
                          rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return -1;
}

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;
  int n_sets_before_autoinc;
  struct cselib_record_autoinc_data data;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
         set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx x = XVECEXP (body, 0, i);

          if (GET_CODE (x) == SET)
            {
              sets[n_sets].src = SET_SRC (x);
              sets[n_sets].dest = SET_DEST (x);
              n_sets++;
            }
        }
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
        sets[0].src = XEXP (note, 0);
    }
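
  /* Append the SETs implied by auto-increment addressing modes in
     INSN, collected by cselib_record_autoinc_cb above.  */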
  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
        sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        {
          rtx src = sets[i].src;
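          /* For a conditionally executed set, record the value as
             "if COND then SRC else previous DEST" rather than SRC
             alone.  */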
          if (cond)
            src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
          sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
          if (MEM_P (dest))
            {
              enum machine_mode address_mode
                = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));

              sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
                                                     address_mode, 1,
                                                     GET_MODE (dest));
            }
          else
            sets[i].dest_addr_elt = 0;
        }
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
        {
          rtx dest = sets[i].dest;
          if (REG_P (dest) || MEM_P (dest))
            {
              int j;
              for (j = i + 1; j < n_sets; j++)
                if (rtx_equal_p (dest, sets[j].dest))
                  {
                    sets[i].dest = pc_rtx;
                    sets[j].dest = pc_rtx;
                  }
            }
        }
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (LABEL_P (insn)
      || (CALL_P (insn)
          && find_reg_note (insn, REG_SETJMP, NULL))
      || (NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
          && MEM_VOLATILE_P (PATTERN (insn))))
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL_RTX;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL_RTX;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (call_used_regs[i]
            || (REG_VALUES (i) && REG_VALUES (i)->elt
                && HARD_REGNO_CALL_PART_CLOBBERED (i,
                      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
          cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
         conservative here and treat looping pure or const functions
         as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
          || !(RTL_CONST_OR_PURE_CALL_P (insn)))
        cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

  cselib_current_insn = NULL_RTX;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hashtables with very few
         useless elements.  */
      && ((unsigned int)n_useless_values
          > (cselib_hash_table->n_elements
             - cselib_hash_table->n_deleted
             - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
                                     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
                                         sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
                                       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it, however, if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Leave some space for newly emitted instructions so we don't
         end up reallocating between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = htab_create (31, get_value_hash,
                                   entry_and_rtx_equal_p, NULL);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  htab_delete (cselib_hash_table);
  free (used_regs);
  used_regs = 0;
  cselib_hash_table = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *INFO.  */

static int
dump_cselib_val (void **x, void *info)
{
  cselib_val *v = (cselib_val *)*x;
  FILE *out = (FILE *)info;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" locs:", out);
      do
        {
          if (l->setting_insn)
            fprintf (out, "\n  from insn %i ",
                     INSN_UID (l->setting_insn));
          else
            fprintf (out, "\n   ");
          print_inline_rtx (out, l->loc, 4);
        }
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" addr list:", out);
      do
        {
          fputs ("\n  ", out);
          print_inline_rtx (out, e->elt->val_rtx, 2);
        }
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  htab_traverse (cselib_hash_table, dump_cselib_val, out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"