/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "emit-rtl.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "hashtab.h"
#include "tree-pass.h"
#include "cselib.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static int discard_useless_locs (void **, void *);
static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int);
static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, enum machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
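
/* For example, for (mem:SI (reg:SI 1)) we first look up (reg:SI 1) in
   REG_VALUES (1), yielding some VALUE V, and then walk V->addr_list for
   an SImode element; only expressions that are neither a REG nor a MEM
   go through cselib_hash_table.  See cselib_lookup_1 and
   cselib_lookup_mem below.  */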

/* A table that enables us to look up elts by their value.  */
static htab_t cselib_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
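
/* For example, after (set (reg:SI 5) ...) and a later lookup of
   (reg:HI 5), REG_VALUES (5) is a two-element list whose first elt is
   the SImode VALUE and whose second is the HImode VALUE.  A register
   whose first value was created by a lookup rather than by a set gets
   a NULL elt in the first entry (see cselib_lookup_1 below).  */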

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* The set of indices I with REG_VALUES(I) != 0 is saved here.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
                                 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)


/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  struct elt_list *el;
  el = (struct elt_list *) pool_alloc (elt_list_pool);
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list and fill in its two elements with the
   arguments.  */

static inline struct elt_loc_list *
new_elt_loc_list (struct elt_loc_list *next, rtx loc)
{
  struct elt_loc_list *el;
  el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
  el->next = next;
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  gcc_assert (!next || !next->setting_insn
              || !DEBUG_INSN_P (next->setting_insn));

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  return el;
}

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  pool_free (elt_list_pool, l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  pool_free (elt_loc_list_pool, l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  pool_free (cselib_val_pool, v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Remove from hash table all VALUEs except constants.  */

static int
preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return 1;
      if (cfa_base_preserved_val)
        {
          if (v == cfa_base_preserved_val)
            return 1;
          if (GET_CODE (v->locs->loc) == PLUS
              && CONST_INT_P (XEXP (v->locs->loc, 1))
              && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
            return 1;
        }
    }

  htab_clear_slot (cselib_hash_table, x);
  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
  else
    htab_empty (cselib_hash_table);

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (const void *entry, const void *x_arg)
{
  struct elt_loc_list *l;
  const cselib_val *const v = (const cselib_val *) entry;
  rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
  enum machine_mode mode = GET_MODE (x);

  gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
              && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));

  if (mode != GET_MODE (v->val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (CONST_INT_P (XEXP (x, 0))
          || GET_CODE (XEXP (x, 0)) == CONST_FIXED
          || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_p (l->loc, x))
      {
        promote_debug_loc (l);
        return 1;
      }

  return 0;
}

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

static hashval_t
get_value_hash (const void *entry)
{
  const cselib_val *const v = (const cselib_val *) entry;
  return v->hash;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      htab_clear_slot (cselib_hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (cselib_hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs)
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  htab_traverse (cselib_hash_table, discard_useless_values, 0);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

enum machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
    return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (x);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  */
          if (REG_P (t) || MEM_P (t))
            continue;
          else if (rtx_equal_for_cselib_p (t, y))
            return 1;
        }

      return 0;
    }

  if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (y);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t))
            continue;
          else if (rtx_equal_for_cselib_p (x, t))
            return 1;
        }

      return 0;
    }

  if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    case CONST_DOUBLE:
    case CONST_FIXED:
    case DEBUG_EXPR:
      return 0;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j)))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0))
              && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1)))
            return 1;
          if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */
static rtx
wrap_constant (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  gcc_assert (mode != VOIDmode);
  return gen_rtx_CONST (mode, x);
}
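
/* For example, looking up (const_int 5) in SImode hashes and compares
   (const:SI (const_int 5)) instead, so the mode survives the trip
   through the hash table; entry_and_rtx_equal_p above strips the
   wrapper again before comparing locations.  */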

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */
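
/* The order-insensitivity follows from the implementation below: the
   hashes of the operands are summed into HASH, so for instance
   (plus:SI (reg:SI 1) (reg:SI 2)) and (plus:SI (reg:SI 2) (reg:SI 1))
   hash identically.  */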

static unsigned int
cselib_hash_rtx (rtx x, int create)
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
    case PRE_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
{
  cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx) pool_alloc (value_pool);
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as an address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  struct elt_loc_list *l;

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  mem_elt->locs
    = new_elt_loc_list (mem_elt->locs,
                        replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  enum machine_mode mode = GET_MODE (x);
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), mode, create);
  if (! addr)
    return 0;

  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        promote_debug_loc (l->elt->locs);
        return l->elt;
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
                                   mem_elt->hash, INSERT);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }

    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer-equality point
   of view but from a tree-shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passed in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
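
/* A minimal usage sketch (assuming the usual bitmap helpers; not part
   of the original sources): a pass such as dse might expand the
   address of a MEM like this:

     bitmap regs_active = BITMAP_ALLOC (NULL);
     rtx expanded = cselib_expand_value_rtx (XEXP (mem, 0), regs_active, 5);
     BITMAP_FREE (regs_active);

   EXPANDED is NULL if the expansion failed.  */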

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
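
/* For illustration, the simplest conforming callback declines every
   rtx, making cselib_expand_value_rtx_cb behave exactly like
   cselib_expand_value_rtx (a sketch, not part of the original
   sources; the signature mirrors how evd->callback is invoked below):

     static rtx
     decline_all_cb (rtx orig, bitmap regs_active ATTRIBUTE_UNUSED,
                     int max_depth ATTRIBUTE_UNUSED,
                     void *data ATTRIBUTE_UNUSED)
     {
       return orig;
     }
*/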

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  enum machine_mode mode;

  code = GET_CODE (orig);

  /* In the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              int regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is a requirement of dse; if other potential uses
                 need this function we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
              if (regno == STACK_POINTER_REGNUM
                  || regno == FRAME_POINTER_REGNUM
                  || regno == HARD_FRAME_POINTER_REGNUM)
                return orig;

              bitmap_set_bit (evd->regs_active, regno);

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "expanding: r%d into: ", regno);

              result = expand_loc (l->elt->locs, evd, max_depth);
              bitmap_clear_bit (evd->regs_active, regno);

              if (result)
                return result;
              else
                return orig;
            }
      }

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each represents a distinct value.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

    case SUBREG:
      {
        rtx subreg;

        if (evd->callback)
          {
            subreg = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);
            if (subreg != orig)
              return subreg;
          }

        subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
                                            max_depth - 1);
        if (!subreg)
          return NULL;
        scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
                                     GET_MODE (SUBREG_REG (orig)),
                                     SUBREG_BYTE (orig));
        if (scopy == NULL
            || (GET_CODE (scopy) == SUBREG
                && !REG_P (SUBREG_REG (scopy))
                && !MEM_P (SUBREG_REG (scopy))))
          return NULL;

        return scopy;
      }

    case VALUE:
      {
        rtx result;

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            fputs ("\nexpanding ", dump_file);
            print_rtl_single (dump_file, orig);
            fputs (" into...", dump_file);
          }

        if (evd->callback)
          {
            result = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);

            if (result != orig)
              return result;
          }

        result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
        return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
        return evd->callback (orig, evd->regs_active, max_depth,
                              evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          {
            rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
                                                    max_depth - 1);
            if (!result)
              return NULL;
            if (copy)
              XEXP (copy, i) = result;
          }
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            if (copy)
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (orig, i); j++)
              {
                rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
                                                        evd, max_depth - 1);
                if (!result)
                  return NULL;
                if (copy)
                  XVECEXP (copy, i, j) = result;
              }
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into a CONST_INT, which doesn't
     have a mode, and the mode isn't derivable from the whole rtx's
     mode, try simplify_*_operation first with the mode from the
     original operand, and as a fallback wrap the CONST_INT into
     gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
                                            GET_MODE (XEXP (orig, 0)));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_ternary_operation (code, mode,
                                              GET_MODE (XEXP (orig, 0)),
                                              XEXP (copy, 0), XEXP (copy, 1),
                                              XEXP (copy, 2));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (copy, 1)) == VOIDmode
          && (GET_MODE (XEXP (orig, 0)) != VOIDmode
              || GET_MODE (XEXP (orig, 1)) != VOIDmode))
        {
          scopy = simplify_relational_operation (code, mode,
                                                 (GET_MODE (XEXP (orig, 0))
                                                  != VOIDmode)
                                                 ? GET_MODE (XEXP (orig, 0))
                                                 : GET_MODE (XEXP (orig, 1)),
                                                 XEXP (copy, 0),
                                                 XEXP (copy, 1));
          if (scopy)
            return scopy;
        }
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.  */

rtx
cselib_subst_to_values (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
          return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      if (! e)
        {
          /* This happens for autoincrements.  Assign a value that doesn't
             match any other.  */
          e = new_cselib_val (next_uid, GET_MODE (x), x);
        }
      return e->val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
    case CONST_FIXED:
      return x;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      e = new_cselib_val (next_uid, GET_MODE (x), x);
      return e->val_rtx;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i));

          if (t != XEXP (x, i))
            {
              if (x == copy)
                copy = shallow_copy_rtx (x);
              XEXP (copy, i) = t;
            }
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j));

              if (t != XVECEXP (x, i, j))
                {
                  if (XVEC (x, i) == XVEC (copy, i))
                    {
                      if (x == copy)
                        copy = shallow_copy_rtx (x);
                      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
                    }
                  XVECEXP (copy, i, j) = t;
                }
            }
        }
    }

  return copy;
}
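
/* For example, if (reg:SI 1) has value v1 and (mem:SI (reg:SI 2)) has
   value v2, then (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) is rewritten
   to (plus:SI VALUE:v1 VALUE:v2), copying only the enclosing rtx and
   sharing any unchanged subexpressions with the original.  */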

/* Look up the rtl expression X in our tables and return the value it has.
   If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
   we create a new one if possible, using mode MODE if X doesn't have a mode
   (i.e. because it's a constant).  */

static cselib_val *
cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (mode == GET_MODE (l->elt->val_rtx))
          {
            promote_debug_loc (l->elt->locs);
            return l->elt;
          }

      if (! create)
        return 0;

      if (i < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs[i][mode];

          if (n > max_value_regs)
            max_value_regs = n;
        }

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      e->locs = new_elt_loc_list (e->locs, x);
      if (REG_VALUES (i) == 0)
        {
          /* Maintain the invariant that the first entry of
             REG_VALUES, if present, must be the value used to set the
             register, or NULL.  */
          used_regs[n_used_regs++] = i;
          REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
        }
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
                                   hashval, create ? INSERT : NO_INSERT);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
  return e;
}
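
/* Note that register lookups are mode-sensitive: (reg:SI 60) and
   (reg:HI 60) name distinct values, since the search above matches on
   the mode of each recorded value.  REG_VALUES (60) simply chains one
   cselib_val for every mode in which register 60 has been used since
   it was last set.  */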

/* Wrapper for cselib_lookup that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, enum machine_mode mode,
                         int create, rtx insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1 that logs the lookup result and
   maintains the invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, enum machine_mode mode, int create)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; that is
     probably easiest done by setting cselib_current_insn to NULL
     before the call above.  */

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
               ret ? ret->uid : 0,
               ret ? ret->hash : 0);
    }

  return ret;
}
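
/* With dumping enabled (TDF_DETAILS), each lookup therefore produces a
   trace line of roughly this shape, showing the rtx followed by the
   uid and hash of the value found, or 0:0 for a failed non-creating
   lookup:

     cselib lookup (reg:SI 60) => 5:1678  */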

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  For a
   pseudo register, MODE is ignored and only REGNO itself is invalidated;
   invalidating call clobbered registers across a call likewise works one
   register at a time, using each register's raw mode.  */

static void
cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
              || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
        i = 0;
      else
        i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
         we're invalidating, remove the value.  */
      while (*l)
        {
          cselib_val *v = (*l)->elt;
          bool had_locs;
          rtx setting_insn;
          struct elt_loc_list **p;
          unsigned int this_last = i;

          if (i < FIRST_PSEUDO_REGISTER && v != NULL)
            this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

          if (this_last < regno || v == NULL
              || (v == cfa_base_preserved_val
                  && i == cfa_base_preserved_regno))
            {
              l = &(*l)->next;
              continue;
            }

          /* We have an overlap.  */
          if (*l == REG_VALUES (i))
            {
              /* Maintain the invariant that the first entry of
                 REG_VALUES, if present, must be the value used to set
                 the register, or NULL.  This is also nice because
                 then we won't push the same regno onto used_regs
                 multiple times.  */
              (*l)->elt = NULL;
              l = &(*l)->next;
            }
          else
            unchain_one_elt_list (l);

          had_locs = v->locs != NULL;
          setting_insn = v->locs ? v->locs->setting_insn : NULL;

          /* Now, we clear the mapping from value to reg.  It must exist, so
             this code will crash intentionally if it doesn't.  */
          for (p = &v->locs; ; p = &(*p)->next)
            {
              rtx x = (*p)->loc;

              if (REG_P (x) && REGNO (x) == i)
                {
                  unchain_one_elt_loc_list (p);
                  break;
                }
            }

          if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
            {
              if (setting_insn && DEBUG_INSN_P (setting_insn))
                n_useless_debug_values++;
              else
                n_useless_values++;
            }
        }
    }
}
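
/* For example, if a DImode value was recorded for hard register 2 and
   each hard register is one word wide, an SImode store to hard
   register 3 must invalidate that value as well: the loop above starts
   max_value_regs below REGNO precisely so that such wider values
   living in lower-numbered registers are found and removed.  */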

/* Return true if X has a value that can vary even between two
   executions of the program; false means X can be compared reliably
   against certain constants or near-constants.  */

static bool
cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
{
  /* We actually don't need to verify very hard.  This is because
     if X has actually changed, we invalidate the memory anyway,
     so assume that all common memory addresses are
     invariant.  */
  return false;
}

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is callmem, i.e. (mem:BLK (scratch)), which
   conflicts with every other memory reference.  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
        {
          rtx x = (*p)->loc;
          cselib_val *addr;
          struct elt_list **mem_chain;

          /* MEMs may occur in locations only at the top level; below
             that every MEM or REG is substituted by its VALUE.  */
          if (!MEM_P (x))
            {
              p = &(*p)->next;
              continue;
            }
          if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
              && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
                                          x, NULL_RTX, cselib_rtx_varies_p))
            {
              has_mem = true;
              num_mems++;
              p = &(*p)->next;
              continue;
            }

          /* This one overlaps.  */
          /* We must have a mapping from this MEM's address to the
             value (V).  Remove that, too.  */
          addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
          mem_chain = &addr->addr_list;
          for (;;)
            {
              if ((*mem_chain)->elt == v)
                {
                  unchain_one_elt_list (mem_chain);
                  break;
                }

              mem_chain = &(*mem_chain)->next;
            }

          unchain_one_elt_loc_list (p);
        }

      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
        {
          if (setting_insn && DEBUG_INSN_P (setting_insn))
            n_useless_debug_values++;
          else
            n_useless_values++;
        }

      next = v->next_containing_mem;
      if (has_mem)
        {
          *vp = v;
          vp = &(*vp)->next_containing_mem;
        }
      else
        v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}

/* Invalidate DEST, which is being assigned to or clobbered.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
  if (push_operand (dest, GET_MODE (dest)))
    cselib_invalidate_rtx (stack_pointer_rtx);
}
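
/* E.g. a push such as (set (mem:SI (pre_dec:SI (reg:SI sp))) ...)
   both stores to memory and modifies the stack pointer; the
   push_operand check above catches the second effect even on targets
   that do not define AUTO_INC_DEC.  */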

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
                                   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (dreg < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

          if (n > max_value_regs)
            max_value_regs = n;
        }

      if (REG_VALUES (dreg) == 0)
        {
          used_regs[n_used_regs++] = dreg;
          REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
        }
      else
        {
          /* The register should have been invalidated.  */
          gcc_assert (REG_VALUES (dreg)->elt == 0);
          REG_VALUES (dreg)->elt = src_elt;
        }

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
        n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
           && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
        n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
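
/* As an illustration, after (set (reg:SI 5) (plus:SI (reg:SI 4)
   (const_int 4))) is processed, the value computed for the PLUS gains
   (reg:SI 5) as an extra location, and that value becomes the first
   (setting) entry of REG_VALUES (5), per the invariant described in
   cselib_lookup_1.  */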

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

/* Record the effects of any sets in INSN.  */
static void
cselib_record_sets (rtx insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
         set, if possible.  CLOBBERs are handled by note_stores
         below.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx x = XVECEXP (body, 0, i);

          if (GET_CODE (x) == SET)
            {
              sets[n_sets].src = SET_SRC (x);
              sets[n_sets].dest = SET_DEST (x);
              n_sets++;
            }
        }
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
        sets[0].src = XEXP (note, 0);
    }

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
        sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        {
          rtx src = sets[i].src;
          if (cond)
            src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
          sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
          if (MEM_P (dest))
            {
              enum machine_mode address_mode
                = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));

              sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
                                                     address_mode, 1);
            }
          else
            sets[i].dest_addr_elt = 0;
        }
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
        {
          rtx dest = sets[i].dest;
          if (REG_P (dest) || MEM_P (dest))
            {
              int j;
              for (j = i + 1; j < n_sets; j++)
                if (rtx_equal_p (dest, sets[j].dest))
                  {
                    sets[i].dest = pc_rtx;
                    sets[j].dest = pc_rtx;
                  }
            }
        }
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}
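
/* Note how conditional execution is handled above: for something like
   (cond_exec (ne (reg cc) (const_int 0)) (set (reg 5) (reg 6))), the
   recorded source is (if_then_else (ne ...) (reg 6) (reg 5)), i.e. the
   destination keeps its old value when the condition is false, so no
   unconditional equivalence between (reg 5) and (reg 6) is entered.  */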

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (LABEL_P (insn)
      || (CALL_P (insn)
          && find_reg_note (insn, REG_SETJMP, NULL))
      || (NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
          && MEM_VOLATILE_P (PATTERN (insn))))
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL_RTX;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL_RTX;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (call_used_regs[i]
            || (REG_VALUES (i) && REG_VALUES (i)->elt
                && HARD_REGNO_CALL_PART_CLOBBERED (i,
                      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
          cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
         conservative here and treat looping pure or const functions
         as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
          || !(RTL_CONST_OR_PURE_CALL_P (insn)))
        cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0));
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

  cselib_current_insn = NULL_RTX;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hashtables with very few
         useless elements.  */
      && ((unsigned int)n_useless_values
          > (cselib_hash_table->n_elements
             - cselib_hash_table->n_deleted
             - n_debug_values) / 4))
    remove_useless_values ();
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
                                     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
                                         sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
                                       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We keep reg_values around between passes to avoid having to
     reallocate and clear it each time.  Reallocate it, however, if it
     is too small or grossly oversized.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      if (reg_values)
        free (reg_values);
      /* Allow some extra space for pseudos created by newly emitted
         instructions, so we don't end up reallocating between
         passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = htab_create (31, get_value_hash,
                                   entry_and_rtx_equal_p, NULL);
  next_uid = 1;
}
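
/* A typical client drives cselib roughly as follows (a sketch; the
   flags passed to cselib_init depend on what the pass wants
   recorded):

     init_alias_analysis ();
     cselib_init (CSELIB_RECORD_MEMORY);
     FOR_EACH_BB (bb)
       FOR_BB_INSNS (bb, insn)
         cselib_process_insn (insn);
     cselib_finish ();
     end_alias_analysis ();

   cselib_process_insn itself ignores non-insns, and callers that do
   not want equivalences to survive across basic block boundaries call
   cselib_clear_table between blocks.  */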

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  htab_delete (cselib_hash_table);
  free (used_regs);
  used_regs = 0;
  cselib_hash_table = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val pointed to by X to the FILE pointed to by INFO;
   a callback for htab_traverse.  */

static int
dump_cselib_val (void **x, void *info)
{
  cselib_val *v = (cselib_val *)*x;
  FILE *out = (FILE *)info;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" locs:", out);
      do
        {
          fprintf (out, "\n  from insn %i ",
                   INSN_UID (l->setting_insn));
          print_inline_rtx (out, l->loc, 4);
        }
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" addr list:", out);
      do
        {
          fputs ("\n  ", out);
          print_inline_rtx (out, e->elt->val_rtx, 2);
        }
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  htab_traverse (cselib_hash_table, dump_cselib_val, out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"