/* RTL dead store elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "regset.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "timevar.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "dse.h"
#include "optabs.h"
#include "dbgcnt.h"
#include "target.h"
#include "params.h"
#include "tree-flow.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.

   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   The first step uses cselib and canon_rtx to build the largest
   expression possible for each address.  That step is a forwards pass
   through each basic block.  From the point of view of the global
   technique, the first pass could examine a block in either direction.
   The forwards ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles (1).

   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function that read A before another store to A.  In
     this case S2 can be deleted if there are paths from S2 to the
     end of the function that have no reads or writes to A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the
     documentation of that variable for when it is false.

     The global problem is formulated as a backwards set union
     dataflow problem where the stores are the gens and reads are the
     kills.  Set union problems are rare and require some special
     handling given our representation of bitmaps.  A straightforward
     implementation requires a lot of bitmaps filled with 1s.
     These are expensive and cumbersome in our bitmap formulation so
     care has been taken to avoid large vectors filled with 1s.  See
     the comments in bb_info and in the dataflow confluence functions
     for details.

   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  This has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c is a simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c is a simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c is a simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c is a simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

     a) Reload sometimes creates the slot for one mode of access, and
     then inserts loads and/or stores for a smaller mode.  In this
     case, the current code just punts on the slot.  The proper thing
     to do is to back out and use one bit vector position for each
     byte of the entity associated with the slot.  This depends on
     KNOWING that reload always generates the accesses for each of the
     bytes in some canonical (read that easy to understand several
     passes after reload happens) way.

     b) Reload sometimes decides that the spill slot it allocated was
     not large enough for the mode and goes back and allocates more
     slots with the same mode and alias set.  The backout in this case
     is a little more graceful than (a).  In this case the slot is
     unmarked as being a spill slot and if the final address comes out
     to be based off the frame pointer, the global algorithm handles
     this slot.

     c) For any pass that may prespill, there is currently no
     mechanism to tell the dse pass that the slot being used has the
     special properties that reload uses.  It may be that all that is
     required is to have those passes make the same calls that reload
     does, assuming that the alias sets can be manipulated in the same
     way.  */
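
/* Editorial illustration (a sketch, not part of the original source):
   the global problem above would catch a case like

       mem[fp - 8] = x;   <- S1: dead, every path to exit overwrites it
       ...                   (no read of fp-8 on any path)
       mem[fp - 8] = y;   <- S2: later store to the same address

   while the local algorithm can also kill the first of two stores
   through a cselib-valued base such as r100 = p + 4, within a single
   basic block.  */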

/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)


static bitmap scratch = NULL;
struct insn_info;

/* This structure holds information about a candidate store.  */
struct store_info
{

  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */
  rtx mem;

  /* Canonized MEM address for use by canon_true_dependence.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and of the byte after the last byte
     associated with the operation.  */
  HOST_WIDE_INT begin, end;

  union
    {
      /* A bitmask as wide as the number of bytes in the word that
         contains a 1 if the byte may be needed.  The store is unused if
         all of the bits are 0.  This is used if IS_LARGE is false.  */
      unsigned HOST_WIDE_INT small_bitmask;

      struct
        {
          /* A bitmap with one bit per byte.  A cleared bit means the
             position is needed.  Used if IS_LARGE is true.  */
          bitmap bmap;

          /* Number of set bits (i.e. unneeded bytes) in BITMAP.  If it is
             equal to END - BEGIN, the whole store is unused.  */
          int count;
        } large;
    } positions_needed;

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;

  /* Set if this store stores the same constant value as the
     REDUNDANT_REASON insn stored.  These aren't eliminated early,
     because doing that might prevent the earlier larger store from
     being eliminated.  */
  struct insn_info *redundant_reason;
};
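
/* Editorial example (a sketch, not part of the original source): for a
   4-byte store to fp-8, BEGIN is -8 and END is -4, and with IS_LARGE
   false the low 4 bits of small_bitmask track which of the 4 bytes are
   still needed; once all of them are cleared the store is dead.  */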

/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
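
/* Editorial note (not part of the original source): for example,
   lowpart_bitmask (3) yields 0x7, and
   lowpart_bitmask (HOST_BITS_PER_WIDE_INT) yields an all-ones mask
   (shift count 0).  N == 0 would shift by the full word width, so
   callers are assumed to pass 1 <= N <= HOST_BITS_PER_WIDE_INT.  */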

typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;

/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and of the byte after the last byte
     associated with the operation.  If begin == end == 0, the read did
     not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};
typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;


/* One of these records is created for each insn.  */

struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a category (1)
     read.  This is also set if the insn is a call or clobbers a mem.
     If the insn contains a wild read, the use_rec will be null.  */
  bool wild_read;

  /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
  bool non_frame_wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
        outgoing arguments are stack pointer based and thus deemed
        of non-constant base in this pass.  This requires special
        handling but also means that the frame pointer based stores
        need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
        either stack pointer or hard frame pointer based.  This means
        that we have no other choice than also killing all the frame
        pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls.  Having
     this set is less severe than a wild read, it just means that all
     the frame related stores are killed rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems are here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The live fixed registers.  We assume only fixed registers can
     cause trouble by being clobbered from an expanded pattern;
     storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     stores.  */
  regset fixed_regs_live;

  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass thru the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;

/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
static int active_local_stores_len;

struct bb_info
{

  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */


  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in sets, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is used
     to assure that shift and/or add sequences that are inserted do not
     accidentally clobber live hard regs.  */
  bitmap regs_live;
};
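
/* Editorial sketch (not part of the original source): in terms of the
   sets above, the backward transfer function is essentially

       in = gen | (out & ~kill);

   with OUT first ANDed against the wild_read mask when
   apply_wild_read is set, and the confluence over the successors'
   IN sets is a set intersection, seeded from one successor's OUT as
   described for the out set above.  */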

typedef struct bb_info *bb_info_t;
static alloc_pool bb_info_pool;

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;

/* There is a group_info for each rtx base that is used to reference
   memory.  There are not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */
  rtx base_mem;

  /* Canonized version of base_mem's address.  */
  rtx canon_base_addr;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame:
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* These bitmaps keep track of which offsets in this group escape
     this function.  An offset escapes if it corresponds to a named
     variable whose addressable flag is set.  */
  bitmap escaped_n, escaped_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;

/* Tables of group_info structures, hashed by base value.  */
static htab_t rtx_group_table;

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

DEF_VEC_P(group_info_t);
DEF_VEC_ALLOC_P(group_info_t,heap);

static VEC(group_info_t,heap) *rtx_group_vec;


/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{

  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;

/* These are used to hold the alias sets of spill variables.  Since
   these are never aliased and there may be a lot of them, it makes
   sense to treat them specially.  This bitvector is only allocated in
   calls from dse_record_singleton_alias_set which currently is only
   made during reload1.  So when dse is called before reload this
   mechanism does nothing.  */

static bitmap clear_alias_sets = NULL;

/* The set of clear_alias_sets that have been disqualified because
   there are loads or stores using a different mode than the alias set
   was registered with.  */
static bitmap disqualified_clear_alias_sets = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  enum machine_mode mode;
};

static alloc_pool clear_alias_mode_pool;

/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counters for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* Locations that are killed by calls in the global phase.  */
static bitmap kill_on_calls;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;


static bool gate_dse (void);
static bool gate_dse1 (void);
static bool gate_dse2 (void);


/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/

/* Hashtable callbacks for the clear_alias_mode_table, which maps each
   spill alias set to the mode it was registered with.  */

static int
clear_alias_mode_eq (const void *p1, const void *p2)
{
  const struct clear_alias_mode_holder * h1
    = (const struct clear_alias_mode_holder *) p1;
  const struct clear_alias_mode_holder * h2
    = (const struct clear_alias_mode_holder *) p2;
  return h1->alias_set == h2->alias_set;
}


static hashval_t
clear_alias_mode_hash (const void *p)
{
  const struct clear_alias_mode_holder *holder
    = (const struct clear_alias_mode_holder *) p;
  return holder->alias_set;
}


/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return (struct clear_alias_mode_holder *) *slot;
}


/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

static int
invariant_group_base_eq (const void *p1, const void *p2)
{
  const_group_info_t gi1 = (const_group_info_t) p1;
  const_group_info_t gi2 = (const_group_info_t) p2;
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}


static hashval_t
invariant_group_base_hash (const void *p)
{
  const_group_info_t gi = (const_group_info_t) p;
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}


/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  void **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
         if necessary.  */
      tmp_gi.rtx_base = base;
      slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
        {
          clear_alias_group = gi =
            (group_info_t) pool_alloc (rtx_group_info_pool);
          memset (gi, 0, sizeof (struct group_info));
          gi->id = rtx_group_next_id++;
          gi->store1_n = BITMAP_ALLOC (NULL);
          gi->store1_p = BITMAP_ALLOC (NULL);
          gi->store2_n = BITMAP_ALLOC (NULL);
          gi->store2_p = BITMAP_ALLOC (NULL);
          gi->escaped_p = BITMAP_ALLOC (NULL);
          gi->escaped_n = BITMAP_ALLOC (NULL);
          gi->group_kill = BITMAP_ALLOC (NULL);
          gi->process_globally = false;
          gi->offset_map_size_n = 0;
          gi->offset_map_size_p = 0;
          gi->offset_map_n = NULL;
          gi->offset_map_p = NULL;
          VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
        }
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (BLKmode, base);
      gi->canon_base_addr = canon_rtx (base);
      gi->store1_n = BITMAP_ALLOC (NULL);
      gi->store1_p = BITMAP_ALLOC (NULL);
      gi->store2_n = BITMAP_ALLOC (NULL);
      gi->store2_p = BITMAP_ALLOC (NULL);
      gi->escaped_p = BITMAP_ALLOC (NULL);
      gi->escaped_n = BITMAP_ALLOC (NULL);
      gi->group_kill = BITMAP_ALLOC (NULL);
      gi->process_globally = false;
      gi->frame_related =
        (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
    }

  return gi;
}
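
/* Editorial note (not part of the original source): the first call
   with a given base, e.g. (symbol_ref "x"), allocates a fresh group
   with empty store1/store2 bitmaps and a BLKmode base_mem; later
   calls with an equal base find the existing entry through
   rtx_group_table, so there is exactly one group per base.  */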


/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  scratch = BITMAP_ALLOC (NULL);
  kill_on_calls = BITMAP_ALLOC (NULL);

  rtx_store_info_pool
    = create_alloc_pool ("rtx_store_info_pool",
                         sizeof (struct store_info), 100);
  read_info_pool
    = create_alloc_pool ("read_info_pool",
                         sizeof (struct read_info), 100);
  insn_info_pool
    = create_alloc_pool ("insn_info_pool",
                         sizeof (struct insn_info), 100);
  bb_info_pool
    = create_alloc_pool ("bb_info_pool",
                         sizeof (struct bb_info), 100);
  rtx_group_info_pool
    = create_alloc_pool ("rtx_group_info_pool",
                         sizeof (struct group_info), 100);
  deferred_change_pool
    = create_alloc_pool ("deferred_change_pool",
                         sizeof (struct deferred_change), 10);

  rtx_group_table = htab_create (11, invariant_group_base_hash,
                                 invariant_group_base_eq, NULL);

  bb_table = XCNEWVEC (bb_info_t, last_basic_block);
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !cfun->stdarg;

  init_alias_analysis ();

  if (clear_alias_sets)
    clear_alias_group = get_group_info (NULL);
  else
    clear_alias_group = NULL;
}



/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/

/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->is_large)
        BITMAP_FREE (store_info->positions_needed.large.bmap);
      if (store_info->cse_base)
        pool_free (cse_store_info_pool, store_info);
      else
        pool_free (rtx_store_info_pool, store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}

typedef struct
{
  rtx first, current;
  regset fixed_regs_live;
  bool failure;
} note_add_store_info;

/* Callback for emit_inc_dec_insn_before via note_stores.
   Check if a register that is live afterwards is clobbered.  */

static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
  rtx insn;
  note_add_store_info *info = (note_add_store_info *) data;
  int r, n;

  if (!REG_P (loc))
    return;

  /* If this register is referenced by the current or an earlier insn,
     that's OK.  E.g. this applies to the register that is being incremented
     with this addition.  */
  for (insn = info->first;
       insn != NEXT_INSN (info->current);
       insn = NEXT_INSN (insn))
    if (reg_referenced_p (loc, PATTERN (insn)))
      return;

  /* If we come here, we have a clobber of a register that's only OK
     if that register is not live.  If we don't have liveness information
     available, fail now.  */
  if (!info->fixed_regs_live)
    {
      info->failure = true;
      return;
    }
  /* Now check if this is a live fixed register.  */
  r = REGNO (loc);
  n = hard_regno_nregs[r][GET_MODE (loc)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (info->fixed_regs_live, r+n))
      info->failure = true;
}

/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
   SRC + SRCOFF before insn ARG.  */

static int
emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
                          rtx op ATTRIBUTE_UNUSED,
                          rtx dest, rtx src, rtx srcoff, void *arg)
{
  insn_info_t insn_info = (insn_info_t) arg;
  rtx insn = insn_info->insn, new_insn, cur;
  note_add_store_info info;

  /* We can reuse all operands without copying, because we are about
     to delete the insn that contained it.  */
  if (srcoff)
    {
      start_sequence ();
      emit_insn (gen_add3_insn (dest, src, srcoff));
      new_insn = get_insns ();
      end_sequence ();
    }
  else
    new_insn = gen_move_insn (dest, src);
  info.first = new_insn;
  info.fixed_regs_live = insn_info->fixed_regs_live;
  info.failure = false;
  for (cur = new_insn; cur; cur = NEXT_INSN (cur))
    {
      info.current = cur;
      note_stores (PATTERN (cur), note_add_store, &info);
    }

  /* If a failure was flagged above, return 1 so that for_each_inc_dec will
     return it immediately, communicating the failure to its caller.  */
  if (info.failure)
    return 1;

  emit_insn_before (new_insn, insn);

  return -1;
}

/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
   is there, is split into a separate insn.
   Return true on success (or if there was nothing to do), false on failure.  */

static bool
check_for_inc_dec_1 (insn_info_t insn_info)
{
  rtx insn = insn_info->insn;
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (&insn, emit_inc_dec_insn_before, insn_info) == 0;
  return true;
}


/* Entry point for postreload.  If you work on reload_cse, or you need this
   anywhere else, consider if you can provide register liveness information
   and add a parameter to this function so that it can be passed down in
   insn_info.fixed_regs_live.  */
bool
check_for_inc_dec (rtx insn)
{
  struct insn_info insn_info;
  rtx note;

  insn_info.insn = insn;
  insn_info.fixed_regs_live = NULL;
  note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (&insn, emit_inc_dec_insn_before, &insn_info) == 0;
  return true;
}
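
/* Editorial example (a sketch, not part of the original source): a
   dead store such as

       (set (mem (post_inc (reg X))) (reg Y))

   still has the side effect of advancing X, so before deletion the
   REG_INC handling above asks emit_inc_dec_insn_before to emit an
   explicit X = X + size insn, and the deletion is abandoned if that
   insn would clobber a live fixed register.  */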

/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  if (!check_for_inc_dec_1 (insn_info))
    return;
  if (dump_file)
    {
      fprintf (dump_file, "Locally deleting insn %d ",
               INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
        fprintf (dump_file, "alias set %d\n",
                 (int) insn_info->store_rec->alias_set);
      else
        fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      pool_free (read_info_pool, read_info);
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}

/* Check if EXPR can possibly escape the current function scope.  */
static bool
can_escape (tree expr)
{
  tree base;
  if (!expr)
    return true;
  base = get_base_address (expr);
  if (DECL_P (base)
      && !may_be_aliased (base))
    return false;
  return true;
}

/* Set the store* bitmaps and offset_map_size* fields in GROUP based on
   OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
                tree expr)
{
  HOST_WIDE_INT i;
  bool expr_escapes = can_escape (expr);
  if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
    for (i = offset; i < offset + width; i++)
      {
        bitmap store1;
        bitmap store2;
        bitmap escaped;
        int ai;
        if (i < 0)
          {
            store1 = group->store1_n;
            store2 = group->store2_n;
            escaped = group->escaped_n;
            ai = -i;
          }
        else
          {
            store1 = group->store1_p;
            store2 = group->store2_p;
            escaped = group->escaped_p;
            ai = i;
          }

        if (!bitmap_set_bit (store1, ai))
          bitmap_set_bit (store2, ai);
        else
          {
            if (i < 0)
              {
                if (group->offset_map_size_n < ai)
                  group->offset_map_size_n = ai;
              }
            else
              {
                if (group->offset_map_size_p < ai)
                  group->offset_map_size_p = ai;
              }
          }
        if (expr_escapes)
          bitmap_set_bit (escaped, ai);
      }
}
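
/* Editorial example (a sketch, not part of the original source): a
   4-byte store at OFFSET -2 visits i = -2, -1, 0, 1, setting bits 2
   and 1 of the "_n" bitmaps and bits 0 and 1 of the "_p" bitmaps.  A
   second store touching any of those bytes fails the bitmap_set_bit
   test on store1 and so marks store2, flagging that offset as worth
   considering in the global problem.  */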

static void
reset_active_stores (void)
{
  active_local_stores = NULL;
  active_local_stores_len = 0;
}

/* Free all READ_REC of the LAST_INSN of BB_INFO.  */

static void
free_read_records (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;
  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
        {
          pool_free (read_info_pool, *ptr);
          *ptr = next;
        }
      else
        ptr = &(*ptr)->next;
    }
}

/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Set the BB_INFO so that the last insn is marked as a wild read of
   non-frame locations.  */

static void
add_non_frame_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->non_frame_wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || x == pic_offset_table_rtx)
        return true;
      return false;

    default:
      return false;
    }
}
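
/* Editorial note (not part of the original source): for example, a
   (symbol_ref "x") base or frame_pointer_rtx satisfies this test and
   can get a group in canon_address below, whereas a pseudo register
   base fails it and is handled through cselib instead.  */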

/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.

   FOR_READ is true if this is a mem read and false if not.  */

static bool
canon_address (rtx mem,
               alias_set_type *alias_set_out,
               int *group_id,
               HOST_WIDE_INT *offset,
               cselib_val **base)
{
  enum machine_mode address_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  int expanded;

  /* Make sure that cselib has initialized all of the operands of
     the address before asking it to do the subst.  */

  if (clear_alias_sets)
    {
      /* If this is a spill, do not do any further processing.  */
      alias_set_type alias_set = MEM_ALIAS_SET (mem);
      if (dump_file)
        fprintf (dump_file, "found alias set %d\n", (int) alias_set);
      if (bitmap_bit_p (clear_alias_sets, alias_set))
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (alias_set);

          /* If the modes do not match, we cannot process this set.  */
          if (entry->mode != GET_MODE (mem))
            {
              if (dump_file)
                fprintf (dump_file,
                         "disqualifying alias set %d, (%s) != (%s)\n",
                         (int) alias_set, GET_MODE_NAME (entry->mode),
                         GET_MODE_NAME (GET_MODE (mem)));

              bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
              return false;
            }

          *alias_set_out = alias_set;
          *group_id = clear_alias_group->id;
          return true;
        }
    }

  *alias_set_out = 0;

  cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));

  if (dump_file)
    {
      fprintf (dump_file, "  mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* First see if just canon_rtx (mem_address) is const or frame,
     if not, try cselib_expand_value_rtx and call canon_rtx on that.  */
  address = NULL_RTX;
  for (expanded = 0; expanded < 2; expanded++)
    {
      if (expanded)
        {
          /* Use cselib to replace all of the reg references with the full
             expression.  This will take care of the case where we have

             r_x = base + offset;
             val = *r_x;

             by making it into

             val = *(base + offset);  */

          expanded_address = cselib_expand_value_rtx (mem_address,
                                                      scratch, 5);

          /* If this fails, just go with the address from first
             iteration.  */
          if (!expanded_address)
            break;
        }
      else
        expanded_address = mem_address;

      /* Split the address into canonical BASE + OFFSET terms.  */
      address = canon_rtx (expanded_address);

      *offset = 0;

      if (dump_file)
        {
          if (expanded)
            {
              fprintf (dump_file, "\n   after cselib_expand address: ");
              print_inline_rtx (dump_file, expanded_address, 0);
              fprintf (dump_file, "\n");
            }

          fprintf (dump_file, "\n   after canon_rtx address: ");
          print_inline_rtx (dump_file, address, 0);
          fprintf (dump_file, "\n");
        }

      if (GET_CODE (address) == CONST)
        address = XEXP (address, 0);

      if (GET_CODE (address) == PLUS
          && CONST_INT_P (XEXP (address, 1)))
        {
          *offset = INTVAL (XEXP (address, 1));
          address = XEXP (address, 0);
        }

      if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
          && const_or_frame_p (address))
        {
          group_info_t group = get_group_info (address);

          if (dump_file)
            fprintf (dump_file, "  gid=%d offset=%d \n",
                     group->id, (int)*offset);
          *base = NULL;
          *group_id = group->id;
          return true;
        }
    }

  *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
  *group_id = -1;

  if (*base == NULL)
    {
      if (dump_file)
        fprintf (dump_file, " no cselib val - should be a wild read.\n");
      return false;
    }
  if (dump_file)
    fprintf (dump_file, "  varying cselib base=%u:%u offset = %d\n",
             (*base)->uid, (*base)->hash, (int)*offset);
  return true;
}
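
/* Editorial example (a sketch, not part of the original source): an
   address of the form (plus (reg fp) (const_int -4)) canonicalizes to
   the frame-pointer group with *offset == -4, *base == NULL and a
   non-negative *group_id; an address based on a pseudo, say
   (plus (reg 100) (const_int 8)), instead yields *group_id == -1 and
   a cselib value in *base.  */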


/* Clear the rhs field from the active_local_stores array.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
        store_info = store_info->next;

      store_info->rhs = NULL;
      store_info->const_rhs = NULL;

      ptr = ptr->next_local_store;
    }
}


/* Mark the byte POS bytes from the beginning of store S_INFO as unneeded.  */

static inline void
set_position_unneeded (store_info_t s_info, int pos)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
        s_info->positions_needed.large.count++;
    }
  else
    s_info->positions_needed.small_bitmask
      &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
}

/* Mark the whole store S_INFO as unneeded.  */

static inline void
set_all_positions_unneeded (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int pos, end = s_info->end - s_info->begin;
      for (pos = 0; pos < end; pos++)
        bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
      s_info->positions_needed.large.count = end;
    }
  else
    s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
}

/* Return TRUE if any bytes from S_INFO store are needed.  */

static inline bool
any_positions_needed_p (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    return (s_info->positions_needed.large.count
            < s_info->end - s_info->begin);
  else
    return (s_info->positions_needed.small_bitmask
            != (unsigned HOST_WIDE_INT) 0);
}

/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
   store are needed.  */

static inline bool
all_positions_needed_p (store_info_t s_info, int start, int width)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int end = start + width;
      while (start < end)
        if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
          return false;
      return true;
    }
  else
    {
      unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
      return (s_info->positions_needed.small_bitmask & mask) == mask;
    }
}
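
/* Editorial walkthrough (a sketch, not part of the original source):
   assuming a 4-byte store whose small_bitmask was initialized to a
   mask of 4 low bits (0xf), a later store that covers bytes 0 and 1
   calls set_position_unneeded for those positions, leaving the mask
   0xc; all_positions_needed_p (s_info, 0, 2) then returns false, and
   once the mask reaches 0 any_positions_needed_p reports the whole
   store as dead.  */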
1396
 
1397
 
1398
static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT,
1399
                           HOST_WIDE_INT, basic_block, bool);
1400
 
1401
 
1402
/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
1403
   there is a candidate store, after adding it to the appropriate
1404
   local store group if so.  */
1405
 
1406
static int
1407
record_store (rtx body, bb_info_t bb_info)
1408
{
1409
  rtx mem, rhs, const_rhs, mem_addr;
1410
  HOST_WIDE_INT offset = 0;
1411
  HOST_WIDE_INT width = 0;
1412
  alias_set_type spill_alias_set;
1413
  insn_info_t insn_info = bb_info->last_insn;
1414
  store_info_t store_info = NULL;
1415
  int group_id;
1416
  cselib_val *base = NULL;
1417
  insn_info_t ptr, last, redundant_reason;
1418
  bool store_is_unused;
1419
 
1420
  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1421
    return 0;
1422
 
1423
  mem = SET_DEST (body);
1424
 
1425
  /* If this is not used, then this cannot be used to keep the insn
1426
     from being deleted.  On the other hand, it does provide something
1427
     that can be used to prove that another store is dead.  */
1428
  store_is_unused
1429
    = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1430
 
1431
  /* Check whether that value is a suitable memory location.  */
1432
  if (!MEM_P (mem))
1433
    {
1434
      /* If the set or clobber is unused, then it does not effect our
1435
         ability to get rid of the entire insn.  */
1436
      if (!store_is_unused)
1437
        insn_info->cannot_delete = true;
1438
      return 0;
1439
    }
1440
 
1441
  /* At this point we know mem is a mem. */
1442
  if (GET_MODE (mem) == BLKmode)
1443
    {
1444
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1445
        {
1446
          if (dump_file)
1447
            fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1448
          add_wild_read (bb_info);
1449
          insn_info->cannot_delete = true;
1450
          return 0;
1451
        }
1452
      /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1453
         as memset (addr, 0, 36);  */
1454
      else if (!MEM_SIZE_KNOWN_P (mem)
1455
               || MEM_SIZE (mem) <= 0
1456
               || MEM_SIZE (mem) > MAX_OFFSET
               || GET_CODE (body) != SET
               || !CONST_INT_P (SET_SRC (body)))
        {
          if (!store_is_unused)
            {
              /* If the set or clobber is unused, then it does not affect our
                 ability to get rid of the entire insn.  */
              insn_info->cannot_delete = true;
              clear_rhs_from_active_local_stores ();
            }
          return 0;
        }
    }

  /* We can still process a volatile mem; we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = MEM_SIZE (mem);
  else
    {
      width = GET_MODE_SIZE (GET_MODE (mem));
      gcc_assert ((unsigned) width <= HOST_BITS_PER_WIDE_INT);
    }

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      gcc_assert (GET_MODE (mem) != BLKmode);

      if (!bitmap_set_bit (store1, spill_alias_set))
        bitmap_set_bit (store2, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
        clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);

      if (dump_file)
        fprintf (dump_file, " processing spill store %d(%s)\n",
                 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
         frame pointer we can do global analysis.  */

      group_info_t group
        = VEC_index (group_info_t, rtx_group_vec, group_id);
      tree expr = MEM_EXPR (mem);

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
      set_usage_bits (group, offset, width, expr);

      if (dump_file)
        fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
                 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      rtx base_term = find_base_term (XEXP (mem, 0));
      if (!base_term
          || (GET_CODE (base_term) == ADDRESS
              && GET_MODE (base_term) == Pmode
              && XEXP (base_term, 0) == stack_pointer_rtx))
        insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = (store_info_t) pool_alloc (cse_store_info_pool);
      group_id = -1;

      if (dump_file)
        fprintf (dump_file, " processing cselib store [%d..%d)\n",
                 (int)offset, (int)(offset+width));
    }

  const_rhs = rhs = NULL_RTX;
  if (GET_CODE (body) == SET
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
          || GET_CODE (SET_SRC (body)) == SUBREG
          || CONSTANT_P (SET_SRC (body)))
      && !MEM_VOLATILE_P (mem)
      /* Sometimes the store and reload are used for truncation and
         rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    {
      rhs = SET_SRC (body);
      if (CONSTANT_P (rhs))
        const_rhs = rhs;
      else if (body == PATTERN (insn_info->insn))
        {
          rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
          if (tem && CONSTANT_P (XEXP (tem, 0)))
            const_rhs = XEXP (tem, 0);
        }
      if (const_rhs == NULL_RTX && REG_P (rhs))
        {
          rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);

          if (tem && CONSTANT_P (tem))
            const_rhs = tem;
        }
    }
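
  /* Illustration (hypothetical rtl): if the store writes pseudo r70 and
     cselib can expand r70 to (const_int 42), const_rhs becomes that
     constant, letting a later store of the same constant to the same
     bytes be recognized as redundant.  */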

  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;
  redundant_reason = NULL;
  mem = canon_rtx (mem);
  /* For alias_set != 0 canon_true_dependence should never be called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
        mem_addr = base->val_rtx;
      else
        {
          group_info_t group
            = VEC_index (group_info_t, rtx_group_vec, group_id);
          mem_addr = group->canon_base_addr;
        }
      if (offset)
        mem_addr = plus_constant (mem_addr, offset);
    }

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool del = true;

      /* Skip the clobbers.  We delete the active insn if this insn
         shadows the set.  To have been put on the active list, it
         has exactly one set.  */
      while (!s_info->is_set)
        s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
        del = false;
      else if (s_info->alias_set)
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (s_info->alias_set);
          /* Generally, spills cannot be processed if any of the
             references to the slot have a different mode.  But if
             we are in the same block and the mode is exactly the same
             between this store and one before in the same block,
             we can still delete it.  */
          if ((GET_MODE (mem) == GET_MODE (s_info->mem))
              && (GET_MODE (mem) == entry->mode))
            {
              del = true;
              set_all_positions_unneeded (s_info);
            }
          if (dump_file)
            fprintf (dump_file, "    trying spill store in insn=%d alias_set=%d\n",
                     INSN_UID (ptr->insn), (int) s_info->alias_set);
        }
      else if ((s_info->group_id == group_id)
               && (s_info->cse_base == base))
        {
          HOST_WIDE_INT i;
          if (dump_file)
            fprintf (dump_file, "    trying store in insn=%d gid=%d[%d..%d)\n",
                     INSN_UID (ptr->insn), s_info->group_id,
                     (int)s_info->begin, (int)s_info->end);

          /* Even if PTR won't be eliminated as unneeded, if both
             PTR and this insn store the same constant value, we might
             eliminate this insn instead.  */
          if (s_info->const_rhs
              && const_rhs
              && offset >= s_info->begin
              && offset + width <= s_info->end
              && all_positions_needed_p (s_info, offset - s_info->begin,
                                         width))
            {
              if (GET_MODE (mem) == BLKmode)
                {
                  if (GET_MODE (s_info->mem) == BLKmode
                      && s_info->const_rhs == const_rhs)
                    redundant_reason = ptr;
                }
              else if (s_info->const_rhs == const0_rtx
                       && const_rhs == const0_rtx)
                redundant_reason = ptr;
              else
                {
                  rtx val;
                  start_sequence ();
                  val = get_stored_val (s_info, GET_MODE (mem),
                                        offset, offset + width,
                                        BLOCK_FOR_INSN (insn_info->insn),
                                        true);
                  if (get_insns () != NULL)
                    val = NULL_RTX;
                  end_sequence ();
                  if (val && rtx_equal_p (val, const_rhs))
                    redundant_reason = ptr;
                }
            }

          for (i = MAX (offset, s_info->begin);
               i < offset + width && i < s_info->end;
               i++)
            set_position_unneeded (s_info, i - s_info->begin);
        }
      else if (s_info->rhs)
        /* Need to see if it is possible for this store to overwrite
           the value of store_info.  If it is, set the rhs to NULL to
           keep it from being used to remove a load.  */
        {
          if (canon_true_dependence (s_info->mem,
                                     GET_MODE (s_info->mem),
                                     s_info->mem_addr,
                                     mem, mem_addr))
            {
              s_info->rhs = NULL;
              s_info->const_rhs = NULL;
            }
        }

      /* An insn can be deleted if every position of every one of
         its s_infos is zero.  */
      if (any_positions_needed_p (s_info))
        del = false;

      if (del)
        {
          insn_info_t insn_to_delete = ptr;

          active_local_stores_len--;
          if (last)
            last->next_local_store = ptr->next_local_store;
          else
            active_local_stores = ptr->next_local_store;

          if (!insn_to_delete->cannot_delete)
            delete_dead_store_insn (insn_to_delete);
        }
      else
        last = ptr;

      ptr = next;
    }

  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = mem;
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = mem_addr;
  store_info->cse_base = base;
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      store_info->is_large = true;
      store_info->positions_needed.large.count = 0;
      store_info->positions_needed.large.bmap = BITMAP_ALLOC (NULL);
    }
  else
    {
      store_info->is_large = false;
      store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
    }
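  /* Illustration (not from the original source): for width == 4,
     lowpart_bitmask (4) yields 0xf -- one "needed" bit per byte of
     the store; set_position_unneeded clears these bits as later
     stores shadow individual bytes.  */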
  store_info->group_id = group_id;
  store_info->begin = offset;
  store_info->end = offset + width;
  store_info->is_set = GET_CODE (body) == SET;
  store_info->rhs = rhs;
  store_info->const_rhs = const_rhs;
  store_info->redundant_reason = redundant_reason;

  /* If this is a clobber, we return 0.  We will only be able to
     delete the insn itself if its single store turns out to be
     unused, but we can use the clobber to delete other stores
     earlier.  */
  return store_info->is_set ? 1 : 0;
}


static void
dump_insn_info (const char * start, insn_info_t insn_info)
{
  fprintf (dump_file, "%s insn=%d %s\n", start,
           INSN_UID (insn_info->insn),
           insn_info->store_rec ? "has store" : "naked");
}


/* If the modes are different and the value's source and target do not
   line up, we need to extract the value from the lower part of the rhs
   of the store, shift it, and then put it into a form that can be shoved
   into the read_insn.  This function generates a right SHIFT (in bits)
   of a value that is at least ACCESS_SIZE bytes wide, so that it can be
   read in READ_MODE.  The shift sequence is returned, or NULL if we
   failed to find one.  */
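
/* Worked example (hypothetical, little-endian): an 8-byte DImode store
   followed by an SImode read of its upper four bytes arrives here with
   SHIFT == 32 and ACCESS_SIZE == 8; the loop below then searches for a
   single LSHIFTRT insn, starting from the narrowest integer mode of at
   least ACCESS_SIZE bytes, that right-justifies the wanted bytes.  */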

static rtx
find_shift_sequence (int access_size,
                     store_info_t store_info,
                     enum machine_mode read_mode,
                     int shift, bool speed, bool require_cst)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode new_mode;
  rtx read_reg = NULL;

  /* Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  */

  for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
                                          MODE_INT);
       GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
       new_mode = GET_MODE_WIDER_MODE (new_mode))
    {
      rtx target, new_reg, shift_seq, insn, new_lhs;
      int cost;

      /* If a constant was stored into memory, try to simplify it here,
         otherwise the cost of the shift might preclude this optimization
         e.g. at -Os, even when no actual shift will be needed.  */
      if (store_info->const_rhs)
        {
          unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
          rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
                                     store_mode, byte);
          if (ret && CONSTANT_P (ret))
            {
              ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
                                                     ret, GEN_INT (shift));
              if (ret && CONSTANT_P (ret))
                {
                  byte = subreg_lowpart_offset (read_mode, new_mode);
                  ret = simplify_subreg (read_mode, ret, new_mode, byte);
                  if (ret && CONSTANT_P (ret)
                      && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
                    return ret;
                }
            }
        }

      if (require_cst)
        return NULL_RTX;

      /* Try a wider mode if truncating the store mode to NEW_MODE
         requires a real instruction.  */
      if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
          && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
        continue;

      /* Also try a wider mode if the necessary punning is either not
         desirable or not possible.  */
      if (!CONSTANT_P (store_info->rhs)
          && !MODES_TIEABLE_P (new_mode, store_mode))
        continue;

      new_reg = gen_reg_rtx (new_mode);

      start_sequence ();

      /* In theory we could also check for an ashr.  Ian Taylor knows
         of one dsp where the cost of these two was not the same.  But
         this really is a rare case anyway.  */
      target = expand_binop (new_mode, lshr_optab, new_reg,
                             GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);

      shift_seq = get_insns ();
      end_sequence ();

      if (target != new_reg || shift_seq == NULL)
        continue;

      cost = 0;
      for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          cost += insn_rtx_cost (PATTERN (insn), speed);

      /* The computation up to here is essentially independent
         of the arguments and could be precomputed.  It may
         not be worth doing so.  We could precompute if
         worthwhile or at least cache the results.  The result
         technically depends on both SHIFT and ACCESS_SIZE,
         but in practice the answer will depend only on ACCESS_SIZE.  */

      if (cost > COSTS_N_INSNS (1))
        continue;

      new_lhs = extract_low_bits (new_mode, store_mode,
                                  copy_rtx (store_info->rhs));
      if (new_lhs == NULL_RTX)
        continue;

      /* We found an acceptable shift.  Generate a move to
         take the value from the store and put it into the
         shift pseudo, then shift it, then generate another
         move to put it into the target of the read.  */
      emit_move_insn (new_reg, new_lhs);
      emit_insn (shift_seq);
      read_reg = extract_low_bits (read_mode, new_mode, new_reg);
      break;
    }

  return read_reg;
}


/* Callback for note_stores to find the hard regs set or clobbered by
   insn.  Data is a bitmap of the hardregs set so far.  */

static void
look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  bitmap regs_set = (bitmap) data;

  if (REG_P (x)
      && HARD_REGISTER_P (x))
    {
      unsigned int regno = REGNO (x);
      bitmap_set_range (regs_set, regno,
                        hard_regno_nregs[regno][GET_MODE (x)]);
    }
}

/* Helper function for replace_read and record_store.
   Attempt to return the value stored by STORE_INFO for the bytes from
   READ_BEGIN up to (but not including) READ_END, read in READ_MODE.
   Return NULL if not successful.  If REQUIRE_CST is true, always
   return a constant.  */

static rtx
get_stored_val (store_info_t store_info, enum machine_mode read_mode,
                HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
                basic_block bb, bool require_cst)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  int shift;
  int access_size; /* In bytes.  */
  rtx read_reg;

  /* To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  */
  if (store_mode == BLKmode)
    shift = 0;
  else if (BYTES_BIG_ENDIAN)
    shift = store_info->end - read_end;
  else
    shift = read_begin - store_info->begin;
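
  /* Illustration (hypothetical offsets): for a store covering bytes
     [0, 8), a little-endian read of bytes [4, 8) arrives at shift == 4,
     while on a big-endian target a read of bytes [0, 4) does; either
     way the shift right-justifies the bytes being read.  */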

  access_size = shift + GET_MODE_SIZE (read_mode);

  /* From now on it is bits.  */
  shift *= BITS_PER_UNIT;

  if (shift)
    read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
                                    optimize_bb_for_speed_p (bb),
                                    require_cst);
  else if (store_mode == BLKmode)
    {
      /* The store is a memset (addr, const_val, const_size).  */
      gcc_assert (CONST_INT_P (store_info->rhs));
      store_mode = int_mode_for_mode (read_mode);
      if (store_mode == BLKmode)
        read_reg = NULL_RTX;
      else if (store_info->rhs == const0_rtx)
        read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
      else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
               || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
        read_reg = NULL_RTX;
      else
        {
          unsigned HOST_WIDE_INT c
            = INTVAL (store_info->rhs)
              & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
          int shift = BITS_PER_UNIT;
          while (shift < HOST_BITS_PER_WIDE_INT)
            {
              c |= (c << shift);
              shift <<= 1;
            }
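          /* The loop above replicates the low byte by doubling, e.g.
             (hypothetically) 0xab -> 0xabab -> 0xabababab, until all
             HOST_BITS_PER_WIDE_INT bits hold the memset value.  */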
          read_reg = gen_int_mode (c, store_mode);
          read_reg = extract_low_bits (read_mode, store_mode, read_reg);
        }
    }
  else if (store_info->const_rhs
           && (require_cst
               || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
    read_reg = extract_low_bits (read_mode, store_mode,
                                 copy_rtx (store_info->const_rhs));
  else
    read_reg = extract_low_bits (read_mode, store_mode,
                                 copy_rtx (store_info->rhs));
  if (require_cst && read_reg && !CONSTANT_P (read_reg))
    read_reg = NULL_RTX;
  return read_reg;
}

/* Take a sequence of:
     A <- r1
     ...
     ... <- A

   and change it into
   r2 <- r1
   A <- r1
   ...
   ... <- r2

   or

   r3 <- extract (r1)
   r3 <- r3 >> shift
   r2 <- extract (r3)
   ... <- r2

   or

   r2 <- extract (r1)
   ... <- r2

   Depending on the alignment and the mode of the store and
   subsequent load.


   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  */

static bool
replace_read (store_info_t store_info, insn_info_t store_insn,
              read_info_t read_info, insn_info_t read_insn, rtx *loc,
              bitmap regs_live)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode read_mode = GET_MODE (read_info->mem);
  rtx insns, this_insn, read_reg;
  basic_block bb;

  if (!dbg_cnt (dse))
    return false;

  /* Create a sequence of instructions to set up the read register.
     This sequence goes immediately before the store and its result
     is read by the load.

     We need to keep this in perspective.  We are replacing a read
     with a sequence of insns, but the read will almost certainly be
     in cache, so it is not going to be an expensive one.  Thus, we
     are not willing to do a multi-insn shift or, worse, a subroutine
     call to get rid of the read.  */
  if (dump_file)
    fprintf (dump_file, "trying to replace %smode load in insn %d"
             " from %smode store in insn %d\n",
             GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
             GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
  start_sequence ();
  bb = BLOCK_FOR_INSN (read_insn->insn);
  read_reg = get_stored_val (store_info,
                             read_mode, read_info->begin, read_info->end,
                             bb, false);
  if (read_reg == NULL_RTX)
    {
      end_sequence ();
      if (dump_file)
        fprintf (dump_file, " -- could not extract bits of stored value\n");
      return false;
    }
  /* Force the value into a new register so that it won't be clobbered
     between the store and the load.  */
  read_reg = copy_to_mode_reg (read_mode, read_reg);
  insns = get_insns ();
  end_sequence ();

  if (insns != NULL_RTX)
    {
      /* Now we have to scan the set of new instructions to see if the
         sequence contains any sets of hardregs that happened to be
         live at this point.  For instance, this can happen if one of
         the insns sets the CC and the CC happened to be live at that
         point.  This does occasionally happen, see PR 37922.  */
      bitmap regs_set = BITMAP_ALLOC (NULL);

      for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
        note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);

      bitmap_and_into (regs_set, regs_live);
      if (!bitmap_empty_p (regs_set))
        {
          if (dump_file)
            {
              fprintf (dump_file,
                       "abandoning replacement because sequence clobbers live hardregs:");
              df_print_regset (dump_file, regs_set);
            }

          BITMAP_FREE (regs_set);
          return false;
        }
      BITMAP_FREE (regs_set);
    }

  if (validate_change (read_insn->insn, loc, read_reg, 0))
    {
      deferred_change_t deferred_change =
        (deferred_change_t) pool_alloc (deferred_change_pool);

      /* Insert this right before the store insn where it will be safe
         from later insns that might change it before the read.  */
      emit_insn_before (insns, store_insn->insn);

      /* And now for the kludge part: cselib croaks if you just
         return at this point.  There are two reasons for this:

         1) Cselib has an idea of how many pseudos there are and
         that does not include the new ones we just added.

         2) Cselib does not know about the move insn we added
         above the store_info, and there is no way to tell it
         about it, because it has "moved on".

         Problem (1) is fixable with a certain amount of engineering.
         Problem (2) requires starting the bb from scratch.  This
         could be expensive.

         So we are just going to have to lie.  The move/extraction
         insns are not really an issue, cselib did not see them.  But
         the use of the new pseudo read_insn is a real problem because
         cselib has not scanned this insn.  The way that we solve this
         problem is that we are just going to put the mem back for now
         and when we are finished with the block, we undo this.  We
         keep a table of mems to get rid of.  At the end of the basic
         block we can put them back.  */

      *loc = read_info->mem;
      deferred_change->next = deferred_change_list;
      deferred_change_list = deferred_change;
      deferred_change->loc = loc;
      deferred_change->reg = read_reg;

      /* Get rid of the read_info; from the point of view of the
         rest of dse, pretend this read never happened.  */
      read_insn->read_rec = read_info->next;
      pool_free (read_info_pool, read_info);
      if (dump_file)
        {
          fprintf (dump_file, " -- replaced the loaded MEM with ");
          print_simple_rtl (dump_file, read_reg);
          fprintf (dump_file, "\n");
        }
      return true;
    }
  else
    {
      if (dump_file)
        {
          fprintf (dump_file, " -- replacing the loaded MEM with ");
          print_simple_rtl (dump_file, read_reg);
          fprintf (dump_file, " led to an invalid instruction\n");
        }
      return false;
    }
}

/* A for_each_rtx callback in which DATA is the bb_info.  Check to see
   if *LOC is a mem; if it is, look at the address and kill any
   appropriate stores that may be active.  */

static int
check_mem_read_rtx (rtx *loc, void *data)
{
  rtx mem = *loc, mem_addr;
  bb_info_t bb_info;
  insn_info_t insn_info;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set = 0;
  cselib_val *base = NULL;
  int group_id;
  read_info_t read_info;

  if (!mem || !MEM_P (mem))
    return 0;

  bb_info = (bb_info_t) data;
  insn_info = bb_info->last_insn;

  if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
      || (MEM_VOLATILE_P (mem)))
    {
      if (dump_file)
        fprintf (dump_file, " adding wild read, volatile or barrier.\n");
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return 0;
    }

  /* If it is reading readonly mem, then there can be no conflict with
     another write. */
  if (MEM_READONLY_P (mem))
    return 0;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      if (dump_file)
        fprintf (dump_file, " adding wild read, canon_address failure.\n");
      add_wild_read (bb_info);
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = -1;
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  read_info = (read_info_t) pool_alloc (read_info_pool);
  read_info->group_id = group_id;
  read_info->mem = mem;
  read_info->alias_set = spill_alias_set;
  read_info->begin = offset;
  read_info->end = offset + width;
  read_info->next = insn_info->read_rec;
  insn_info->read_rec = read_info;
  /* For alias_set != 0 canon_true_dependence should never be called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
        mem_addr = base->val_rtx;
      else
        {
          group_info_t group
            = VEC_index (group_info_t, rtx_group_vec, group_id);
          mem_addr = group->canon_base_addr;
        }
      if (offset)
        mem_addr = plus_constant (mem_addr, offset);
    }

  /* We ignore the clobbers in store_info.  This is mildly aggressive,
     but there really should not be a clobber followed by a read.  */

  if (spill_alias_set)
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        fprintf (dump_file, " processing spill load %d\n",
                 (int) spill_alias_set);

      while (i_ptr)
        {
          store_info_t store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          if (store_info->alias_set == spill_alias_set)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else if (group_id >= 0)
    {
      /* This is the restricted case where the base is a constant or
         the frame pointer and offset is a constant.  */
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        {
          if (width == -1)
            fprintf (dump_file, " processing const load gid=%d[BLK]\n",
                     group_id);
          else
            fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
                     group_id, (int)offset, (int)(offset+width));
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info_t store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* There are three cases here.  */
          if (store_info->group_id < 0)
            /* We have a cselib store followed by a read from a
               const base. */
            remove
              = canon_true_dependence (store_info->mem,
                                       GET_MODE (store_info->mem),
                                       store_info->mem_addr,
                                       mem, mem_addr);

          else if (group_id == store_info->group_id)
            {
              /* This is a block mode load.  We may get lucky and
                 canon_true_dependence may save the day.  */
              if (width == -1)
                remove
                  = canon_true_dependence (store_info->mem,
                                           GET_MODE (store_info->mem),
                                           store_info->mem_addr,
                                           mem, mem_addr);

              /* If this read is just reading back something that we just
                 stored, rewrite the read.  */
              else
                {
                  if (store_info->rhs
                      && offset >= store_info->begin
                      && offset + width <= store_info->end
                      && all_positions_needed_p (store_info,
                                                 offset - store_info->begin,
                                                 width)
                      && replace_read (store_info, i_ptr, read_info,
                                       insn_info, loc, bb_info->regs_live))
                    return 0;

                  /* The bases are the same, just see if the offsets
                     overlap.  */
                  if ((offset < store_info->end)
                      && (offset + width > store_info->begin))
                    remove = true;
                }
            }

          /* else
             The else case that is missing here is that the
             bases are constant but different.  There is nothing
             to do here because there is no overlap.  */

          if (remove)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;
      if (dump_file)
        {
          fprintf (dump_file, " processing cselib load mem:");
          print_inline_rtx (dump_file, mem, 0);
          fprintf (dump_file, "\n");
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info_t store_info = i_ptr->store_rec;

          if (dump_file)
            fprintf (dump_file, " processing cselib load against insn %d\n",
                     INSN_UID (i_ptr->insn));

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* If this read is just reading back something that we just
             stored, rewrite the read.  */
          if (store_info->rhs
              && store_info->group_id == -1
              && store_info->cse_base == base
              && width != -1
              && offset >= store_info->begin
              && offset + width <= store_info->end
              && all_positions_needed_p (store_info,
                                         offset - store_info->begin, width)
              && replace_read (store_info, i_ptr,  read_info, insn_info, loc,
                               bb_info->regs_live))
            return 0;

          if (!store_info->alias_set)
            remove = canon_true_dependence (store_info->mem,
                                            GET_MODE (store_info->mem),
                                            store_info->mem_addr,
                                            mem, mem_addr);

          if (remove)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  return 0;
}

/* A note_uses callback that applies check_mem_read_rtx (via
   for_each_rtx) to each subexpression of *LOC.  DATA is the bb_info,
   passed through to check_mem_read_rtx.  */

static void
check_mem_read_use (rtx *loc, void *data)
{
  for_each_rtx (loc, check_mem_read_rtx, data);
}


/* Get arguments passed to CALL_INSN.  Return TRUE if successful.
   So far it only handles arguments passed in registers.  */
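
/* For example (hypothetical call): for memset (p, c, n) with all three
   arguments passed in integer registers, a successful return leaves
   ARGS[0..2] holding the rtx values of p, c and n, constant-folded via
   cselib_expand_value_rtx where possible.  */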

static bool
get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
{
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  tree arg;
  int idx;

  INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
  for (idx = 0;
       arg != void_list_node && idx < nargs;
       arg = TREE_CHAIN (arg), idx++)
    {
      enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
      rtx reg, link, tmp;
      reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
      if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
          || GET_MODE_CLASS (mode) != MODE_INT)
        return false;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == USE)
          {
            args[idx] = XEXP (XEXP (link, 0), 0);
            if (REG_P (args[idx])
                && REGNO (args[idx]) == REGNO (reg)
                && (GET_MODE (args[idx]) == mode
                    || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
                        && (GET_MODE_SIZE (GET_MODE (args[idx]))
                            <= UNITS_PER_WORD)
                        && (GET_MODE_SIZE (GET_MODE (args[idx]))
                            > GET_MODE_SIZE (mode)))))
              break;
          }
      if (!link)
        return false;

      tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
      if (GET_MODE (args[idx]) != mode)
        {
          if (!tmp || !CONST_INT_P (tmp))
            return false;
          tmp = gen_int_mode (INTVAL (tmp), mode);
        }
      if (tmp)
        args[idx] = tmp;

      targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
    }
  if (arg != void_list_node || idx != nargs)
    return false;
  return true;
}

/* Return a bitmap of the fixed registers contained in IN.  */

static bitmap
copy_fixed_regs (const_bitmap in)
{
  bitmap ret;

  ret = ALLOC_REG_SET (NULL);
  bitmap_and (ret, in, fixed_reg_set_regset);
  return ret;
}

/* Apply record_store to all candidate stores in INSN.  Mark INSN
   if some part of it is not a candidate store and assigns to a
   non-register target.  */

static void
scan_insn (bb_info_t bb_info, rtx insn)
{
  rtx body;
  insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
  int mems_found = 0;
  memset (insn_info, 0, sizeof (struct insn_info));

  if (dump_file)
    fprintf (dump_file, "\n**scanning insn=%d\n",
             INSN_UID (insn));

  insn_info->prev_insn = bb_info->last_insn;
  insn_info->insn = insn;
  bb_info->last_insn = insn_info;

  if (DEBUG_INSN_P (insn))
    {
      insn_info->cannot_delete = true;
      return;
    }

  /* Cselib clears the table for this case, so we have to essentially
     do the same.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
      && MEM_VOLATILE_P (PATTERN (insn)))
    {
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return;
    }

  /* Look at all of the uses in the insn.  */
  note_uses (&PATTERN (insn), check_mem_read_use, bb_info);

  if (CALL_P (insn))
    {
      bool const_call;
      tree memset_call = NULL_TREE;

      insn_info->cannot_delete = true;

      /* Const functions cannot do anything bad, i.e. read memory;
         however, they can read their parameters which may have
         been pushed onto the stack.
         memset and bzero don't read memory either.  */
      const_call = RTL_CONST_CALL_P (insn);
      if (!const_call)
        {
          rtx call = PATTERN (insn);
          if (GET_CODE (call) == PARALLEL)
            call = XVECEXP (call, 0, 0);
          if (GET_CODE (call) == SET)
            call = SET_SRC (call);
          if (GET_CODE (call) == CALL
              && MEM_P (XEXP (call, 0))
              && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
            {
              rtx symbol = XEXP (XEXP (call, 0), 0);
              if (SYMBOL_REF_DECL (symbol)
                  && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
                {
                  if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
                       == BUILT_IN_NORMAL
                       && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
                           == BUILT_IN_MEMSET))
                      || SYMBOL_REF_DECL (symbol) == block_clear_fn)
                    memset_call = SYMBOL_REF_DECL (symbol);
                }
            }
        }
      if (const_call || memset_call)
        {
          insn_info_t i_ptr = active_local_stores;
          insn_info_t last = NULL;

          if (dump_file)
            fprintf (dump_file, "%s call %d\n",
                     const_call ? "const" : "memset", INSN_UID (insn));

          /* See the head comment of the frame_read field.  */
          if (reload_completed)
            insn_info->frame_read = true;

          /* Loop over the active stores and remove those which are
             killed by the const function call.  */
          while (i_ptr)
            {
              bool remove_store = false;

              /* The stack pointer based stores are always killed.  */
              if (i_ptr->stack_pointer_based)
                remove_store = true;

              /* If the frame is read, the frame related stores are killed.  */
              else if (insn_info->frame_read)
                {
                  store_info_t store_info = i_ptr->store_rec;

                  /* Skip the clobbers.  */
                  while (!store_info->is_set)
                    store_info = store_info->next;

                  if (store_info->group_id >= 0
                      && VEC_index (group_info_t, rtx_group_vec,
                                    store_info->group_id)->frame_related)
                    remove_store = true;
                }

              if (remove_store)
                {
                  if (dump_file)
                    dump_insn_info ("removing from active", i_ptr);

                  active_local_stores_len--;
                  if (last)
                    last->next_local_store = i_ptr->next_local_store;
                  else
                    active_local_stores = i_ptr->next_local_store;
                }
              else
                last = i_ptr;

              i_ptr = i_ptr->next_local_store;
            }

          if (memset_call)
            {
              rtx args[3];
              if (get_call_args (insn, memset_call, args, 3)
                  && CONST_INT_P (args[1])
                  && CONST_INT_P (args[2])
                  && INTVAL (args[2]) > 0)
                {
                  rtx mem = gen_rtx_MEM (BLKmode, args[0]);
                  set_mem_size (mem, INTVAL (args[2]));
                  body = gen_rtx_SET (VOIDmode, mem, args[1]);
                  mems_found += record_store (body, bb_info);
                  if (dump_file)
                    fprintf (dump_file, "handling memset as BLKmode store\n");
                  if (mems_found == 1)
                    {
                      if (active_local_stores_len++
                          >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
                        {
                          active_local_stores_len = 1;
                          active_local_stores = NULL;
                        }
                      insn_info->fixed_regs_live
                        = copy_fixed_regs (bb_info->regs_live);
                      insn_info->next_local_store = active_local_stores;
                      active_local_stores = insn_info;
                    }
                }
            }
        }

      else
        /* Every other call, including pure functions, may read any memory
           that is not relative to the frame.  */
        add_non_frame_wild_read (bb_info);

      return;
    }

  /* Assuming that there are sets in these insns, we cannot delete
     them.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER)
      || volatile_refs_p (PATTERN (insn))
      || insn_could_throw_p (insn)
      || (RTX_FRAME_RELATED_P (insn))
      || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
    insn_info->cannot_delete = true;

  body = PATTERN (insn);
  if (GET_CODE (body) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (body, 0); i++)
        mems_found += record_store (XVECEXP (body, 0, i), bb_info);
    }
  else
    mems_found += record_store (body, bb_info);

  if (dump_file)
    fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
             mems_found, insn_info->cannot_delete ? "true" : "false");

  /* If we found one store of a mem, add the insn into the
     active_local_stores so that it can be locally deleted if found
     dead, or used for replace_read and redundant constant store
     elimination.  Otherwise mark it as not deletable.  This
     simplifies the processing later.  */
  if (mems_found == 1)
    {
      if (active_local_stores_len++
          >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
        {
          active_local_stores_len = 1;
          active_local_stores = NULL;
        }
      insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
      insn_info->next_local_store = active_local_stores;
      active_local_stores = insn_info;
    }
  else
    insn_info->cannot_delete = true;
}


/* Remove BASE from the set of active_local_stores.  This is a
   callback from cselib that is used to get rid of the stores in
   active_local_stores.  */

static void
remove_useless_values (cselib_val *base)
{
  insn_info_t insn_info = active_local_stores;
  insn_info_t last = NULL;

  while (insn_info)
    {
      store_info_t store_info = insn_info->store_rec;
      bool del = false;

      /* If ANY of the store_infos match the cselib group that is
         being deleted, then the insn cannot be deleted.  */
      while (store_info)
        {
          if ((store_info->group_id == -1)
              && (store_info->cse_base == base))
            {
              del = true;
              break;
            }
          store_info = store_info->next;
        }

      if (del)
        {
          active_local_stores_len--;
          if (last)
            last->next_local_store = insn_info->next_local_store;
          else
            active_local_stores = insn_info->next_local_store;
          free_store_info (insn_info);
        }
      else
        last = insn_info;

      insn_info = insn_info->next_local_store;
    }
}


/* Do all of step 1.  */

static void
dse_step1 (void)
{
  basic_block bb;
  bitmap regs_live = BITMAP_ALLOC (NULL);

  cselib_init (0);
  all_blocks = BITMAP_ALLOC (NULL);
  bitmap_set_bit (all_blocks, ENTRY_BLOCK);
  bitmap_set_bit (all_blocks, EXIT_BLOCK);

  FOR_ALL_BB (bb)
    {
      insn_info_t ptr;
      bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);

      memset (bb_info, 0, sizeof (struct bb_info));
      bitmap_set_bit (all_blocks, bb->index);
      bb_info->regs_live = regs_live;

      bitmap_copy (regs_live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, regs_live);

      bb_table[bb->index] = bb_info;
      cselib_discard_hook = remove_useless_values;

      if (bb->index >= NUM_FIXED_BLOCKS)
        {
          rtx insn;

          cse_store_info_pool
            = create_alloc_pool ("cse_store_info_pool",
                                 sizeof (struct store_info), 100);
          active_local_stores = NULL;
          active_local_stores_len = 0;
          cselib_clear_table ();

          /* Scan the insns.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (INSN_P (insn))
                scan_insn (bb_info, insn);
              cselib_process_insn (insn);
              if (INSN_P (insn))
                df_simulate_one_insn_forwards (bb, insn, regs_live);
            }

          /* This is something of a hack, because the global algorithm
             is supposed to take care of the case where stores go dead
             at the end of the function.  However, the global
             algorithm must take a more conservative view of block
             mode reads than the local algorithm does.  So to get the
             case where you have a store to the frame followed by a non
             overlapping block mode read, we look at the active local
             stores at the end of the function and delete all of the
             frame and spill based ones.  */
          if (stores_off_frame_dead_at_return
              && (EDGE_COUNT (bb->succs) == 0
                  || (single_succ_p (bb)
                      && single_succ (bb) == EXIT_BLOCK_PTR
                      && ! crtl->calls_eh_return)))
            {
              insn_info_t i_ptr = active_local_stores;
              while (i_ptr)
                {
                  store_info_t store_info = i_ptr->store_rec;

                  /* Skip the clobbers.  */
                  while (!store_info->is_set)
                    store_info = store_info->next;
                  if (store_info->alias_set && !i_ptr->cannot_delete)
                    delete_dead_store_insn (i_ptr);
                  else
                    if (store_info->group_id >= 0)
                      {
                        group_info_t group
                          = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
                        if (group->frame_related && !i_ptr->cannot_delete)
                          delete_dead_store_insn (i_ptr);
                      }

                  i_ptr = i_ptr->next_local_store;
                }
            }

          /* Get rid of the loads that were discovered in
             replace_read.  Cselib is finished with this block.  */
          while (deferred_change_list)
            {
              deferred_change_t next = deferred_change_list->next;

              /* There is no reason to validate this change.  That was
                 done earlier.  */
              *deferred_change_list->loc = deferred_change_list->reg;
              pool_free (deferred_change_pool, deferred_change_list);
              deferred_change_list = next;
            }

          /* Get rid of all of the cselib based store_infos in this
             block and mark the containing insns as not being
             deletable.  */
          ptr = bb_info->last_insn;
          while (ptr)
            {
              if (ptr->contains_cselib_groups)
                {
                  store_info_t s_info = ptr->store_rec;
                  while (s_info && !s_info->is_set)
                    s_info = s_info->next;
                  if (s_info
                      && s_info->redundant_reason
                      && s_info->redundant_reason->insn
                      && !ptr->cannot_delete)
                    {
                      if (dump_file)
                        fprintf (dump_file, "Locally deleting insn %d "
                                            "because insn %d stores the "
                                            "same value and couldn't be "
                                            "eliminated\n",
                                 INSN_UID (ptr->insn),
                                 INSN_UID (s_info->redundant_reason->insn));
                      delete_dead_store_insn (ptr);
                    }
                  if (s_info)
                    s_info->redundant_reason = NULL;
                  free_store_info (ptr);
                }
              else
                {
                  store_info_t s_info;

                  /* Free at least positions_needed bitmaps.  */
                  for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
                    if (s_info->is_large)
                      {
                        BITMAP_FREE (s_info->positions_needed.large.bmap);
                        s_info->is_large = false;
                      }
                }
              ptr = ptr->prev_insn;
            }

          free_alloc_pool (cse_store_info_pool);
        }
      bb_info->regs_live = NULL;
    }

  BITMAP_FREE (regs_live);
  cselib_finish ();
  htab_empty (rtx_group_table);
}


/*----------------------------------------------------------------------------
   Second step.

   Assign each byte position in the stores that we are going to
   analyze globally to a position in the bitmaps.  Returns true if
   there are any bit positions assigned.
----------------------------------------------------------------------------*/

static void
dse_step2_init (void)
{
  unsigned int i;
  group_info_t group;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      /* For all non stack related bases, we only consider a store to
         be deletable if there are two or more stores for that
         position.  This is because it takes one store to make the
         other store redundant.  However, for the stores that are
         stack related, we consider them if there is only one store
         for the position.  We do this because the stack related
         stores can be deleted if there is no read between them and
         the end of the function.

         To make this work in the current framework, we take the stack
         related bases and add all of the bits from store1 into store2.
         This has the effect of making them eligible even if there is
         only one store.  */
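
      /* For example (illustrative): a lone store into a stack slot that
         is never read before the function returns is dead, so a single
         store is enough to make its position worth tracking here.  */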

      if (stores_off_frame_dead_at_return && group->frame_related)
        {
          bitmap_ior_into (group->store2_n, group->store1_n);
          bitmap_ior_into (group->store2_p, group->store1_p);
          if (dump_file)
            fprintf (dump_file, "group %d is frame related ", i);
        }

      group->offset_map_size_n++;
      group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
      group->offset_map_size_p++;
      group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
      group->process_globally = false;
      if (dump_file)
        {
          fprintf (dump_file, "group %d(%d+%d): ", i,
                   (int)bitmap_count_bits (group->store2_n),
                   (int)bitmap_count_bits (group->store2_p));
          bitmap_print (dump_file, group->store2_n, "n ", " ");
          bitmap_print (dump_file, group->store2_p, "p ", "\n");
        }
    }
}


/* Init the offset tables for the normal case.  */

static bool
dse_step2_nospill (void)
{
  unsigned int i;
  group_info_t group;
  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
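  /* Illustration (made-up offsets): if offsets 4 and 8 of some group
     survived into store2_p, they are assigned positions 1 and 2, bits
     1 and 2 are set in the group's kill set, and every other offset
     keeps the value 0, meaning unused.  */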
2964
  current_position = 1;
2965
  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
2966
    {
2967
      bitmap_iterator bi;
2968
      unsigned int j;
2969
 
2970
      if (group == clear_alias_group)
2971
        continue;
2972
 
2973
      memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2974
      memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2975
      bitmap_clear (group->group_kill);
2976
 
2977
      EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2978
        {
2979
          bitmap_set_bit (group->group_kill, current_position);
2980
          if (bitmap_bit_p (group->escaped_n, j))
2981
            bitmap_set_bit (kill_on_calls, current_position);
2982
          group->offset_map_n[j] = current_position++;
2983
          group->process_globally = true;
2984
        }
2985
      EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2986
        {
2987
          bitmap_set_bit (group->group_kill, current_position);
2988
          if (bitmap_bit_p (group->escaped_p, j))
2989
            bitmap_set_bit (kill_on_calls, current_position);
2990
          group->offset_map_p[j] = current_position++;
2991
          group->process_globally = true;
2992
        }
2993
    }
2994
  return current_position != 1;
2995
}


/* Init the offset tables for the spill case.  */

static bool
dse_step2_spill (void)
{
  unsigned int j;
  group_info_t group = clear_alias_group;
  bitmap_iterator bi;

  /* Position 0 is reserved: a 0 in the offset maps means that no
     global position has been assigned.  */
  current_position = 1;

  if (dump_file)
    {
      bitmap_print (dump_file, clear_alias_sets,
                    "clear alias sets              ", "\n");
      bitmap_print (dump_file, disqualified_clear_alias_sets,
                    "disqualified clear alias sets ", "\n");
    }

  memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
  memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
  bitmap_clear (group->group_kill);

  /* Remove the disqualified positions from the store2_p set.  */
  bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);

  /* We do not need to process the store2_n set because
     alias_sets are always positive.  */
  EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
    {
      bitmap_set_bit (group->group_kill, current_position);
      group->offset_map_p[j] = current_position++;
      group->process_globally = true;
    }

  return current_position != 1;
}
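
/* In the spill case the "offsets" being numbered are alias set
   numbers rather than byte offsets: offset_map_p is indexed by the
   alias set of each spill slot.  */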



/*----------------------------------------------------------------------------
  Third step.

  Build the bit vectors for the transfer functions.
----------------------------------------------------------------------------*/


/* Note that this is NOT a general purpose function.  Any mem that has
   an alias set registered here is expected to be COMPLETELY unaliased:
   i.e. its addresses are not and need not be examined.

   It is known that all references to this address will have this
   alias set and there are NO other references to this address in the
   function.

   Currently the only place that is known to be clean enough to use
   this interface is the code that assigns the spill locations.

   All of the mems that have alias_sets registered are subjected to a
   very powerful form of dse where function calls, volatile reads and
   writes, and reads from random locations are not taken into account.

   It is also assumed that these locations go dead when the function
   returns.  This assumption could be relaxed if places were found
   where it does not hold.

   The MODE is passed in and saved.  The mode of each load or store to
   a mem with ALIAS_SET is checked against MODE.  If the size of that
   load or store is different from MODE, processing is halted on this
   alias set.  For the vast majority of alias sets, all of the loads
   and stores will use the same mode.  But vectors are treated
   differently: the alias set is established for the entire vector,
   but reload will insert loads and stores for individual elements and
   we do not necessarily have the information to track those separate
   elements.  So when we see a mode mismatch, we just bail.  */


void
dse_record_singleton_alias_set (alias_set_type alias_set,
                                enum machine_mode mode)
{
  struct clear_alias_mode_holder tmp_holder;
  struct clear_alias_mode_holder *entry;
  void **slot;

  /* If we are not going to run dse, we need to return now or there
     will be problems with allocating the bitmaps.  */
  if ((!gate_dse()) || !alias_set)
    return;

  if (!clear_alias_sets)
    {
      clear_alias_sets = BITMAP_ALLOC (NULL);
      disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
      clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
                                            clear_alias_mode_eq, NULL);
      clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
                                                 sizeof (struct clear_alias_mode_holder), 100);
    }

  bitmap_set_bit (clear_alias_sets, alias_set);

  tmp_holder.alias_set = alias_set;

  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
  gcc_assert (*slot == NULL);

  *slot = entry =
    (struct clear_alias_mode_holder *) pool_alloc (clear_alias_mode_pool);
  entry->alias_set = alias_set;
  entry->mode = mode;
}


/* Remove ALIAS_SET from the sets of stack slots being considered.  */

void
dse_invalidate_singleton_alias_set (alias_set_type alias_set)
{
  if ((!gate_dse()) || !alias_set)
    return;

  bitmap_clear_bit (clear_alias_sets, alias_set);
}


/* Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
   there, return 0.  */

static int
get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
{
  if (offset < 0)
    {
      HOST_WIDE_INT offset_p = -offset;
      if (offset_p >= group_info->offset_map_size_n)
        return 0;
      return group_info->offset_map_n[offset_p];
    }
  else
    {
      if (offset >= group_info->offset_map_size_p)
        return 0;
      return group_info->offset_map_p[offset];
    }
}
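
/* For example, an OFFSET of -3 is looked up as offset_map_n[3], and a
   result of 0 always means that no global position was assigned to
   that offset in step 2.  */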


/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL. */

static void
scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      HOST_WIDE_INT i;
      group_info_t group_info
        = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
      if (group_info->process_globally)
        for (i = store_info->begin; i < store_info->end; i++)
          {
            int index = get_bitmap_index (group_info, i);
            if (index != 0)
              {
                bitmap_set_bit (gen, index);
                if (kill)
                  bitmap_clear_bit (kill, index);
              }
          }
      store_info = store_info->next;
    }
}
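
/* Walking backwards, a store thus GENs the positions of the bytes it
   writes and clears them from KILL: a later store of the same bytes
   can make an earlier one dead.  */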


/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL. */

static void
scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      if (store_info->alias_set)
        {
          int index = get_bitmap_index (clear_alias_group,
                                        store_info->alias_set);
          if (index != 0)
            {
              bitmap_set_bit (gen, index);
              if (kill)
                bitmap_clear_bit (kill, index);
            }
        }
      store_info = store_info->next;
    }
}


/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
{
  read_info_t read_info = insn_info->read_rec;
  int i;
  group_info_t group;

  /* If this insn reads the frame, kill all the frame related stores.  */
  if (insn_info->frame_read)
    {
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        if (group->process_globally && group->frame_related)
          {
            if (kill)
              bitmap_ior_into (kill, group->group_kill);
            bitmap_and_compl_into (gen, group->group_kill);
          }
    }
  if (insn_info->non_frame_wild_read)
    {
      /* Kill all non-frame related stores.  Kill all stores of variables that
         escape.  */
      if (kill)
        bitmap_ior_into (kill, kill_on_calls);
      bitmap_and_compl_into (gen, kill_on_calls);
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        if (group->process_globally && !group->frame_related)
          {
            if (kill)
              bitmap_ior_into (kill, group->group_kill);
            bitmap_and_compl_into (gen, group->group_kill);
          }
    }
  while (read_info)
    {
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        {
          if (group->process_globally)
            {
              if (i == read_info->group_id)
                {
                  if (read_info->begin > read_info->end)
                    {
                      /* Begin > end for block mode reads.  */
                      if (kill)
                        bitmap_ior_into (kill, group->group_kill);
                      bitmap_and_compl_into (gen, group->group_kill);
                    }
                  else
                    {
                      /* The groups are the same, just process the
                         offsets.  */
                      HOST_WIDE_INT j;
                      for (j = read_info->begin; j < read_info->end; j++)
                        {
                          int index = get_bitmap_index (group, j);
                          if (index != 0)
                            {
                              if (kill)
                                bitmap_set_bit (kill, index);
                              bitmap_clear_bit (gen, index);
                            }
                        }
                    }
                }
              else
                {
                  /* The groups are different; if the alias sets
                     conflict, clear the entire group.  We only need
                     to apply this test if the read_info is a cselib
                     read.  Anything with a constant base cannot alias
                     something else with a different constant
                     base.  */
                  if ((read_info->group_id < 0)
                      && canon_true_dependence (group->base_mem,
                                                GET_MODE (group->base_mem),
                                                group->canon_base_addr,
                                                read_info->mem, NULL_RTX))
                    {
                      if (kill)
                        bitmap_ior_into (kill, group->group_kill);
                      bitmap_and_compl_into (gen, group->group_kill);
                    }
                }
            }
        }

      read_info = read_info->next;
    }
}
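
/* Reads are the dual of stores: each position that may be read is
   added to KILL and removed from GEN, so stores of those bytes that
   occur earlier in the block are no longer candidates for
   deletion.  */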

/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
{
  while (read_info)
    {
      if (read_info->alias_set)
        {
          int index = get_bitmap_index (clear_alias_group,
                                        read_info->alias_set);
          if (index != 0)
            {
              if (kill)
                bitmap_set_bit (kill, index);
              bitmap_clear_bit (gen, index);
            }
        }

      read_info = read_info->next;
    }
}


/* Return the insn in BB_INFO before the first wild read or, if there
   are no wild reads in the block, the last insn.  */

static insn_info_t
find_insn_before_first_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info_t last_wild_read = NULL;

  while (insn_info)
    {
      if (insn_info->wild_read)
        {
          last_wild_read = insn_info->prev_insn;
          /* Block starts with wild read.  */
          if (!last_wild_read)
            return NULL;
        }

      insn_info = insn_info->prev_insn;
    }

  if (last_wild_read)
    return last_wild_read;
  else
    return bb_info->last_insn;
}
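
/* Because the walk above runs backwards from the end of the block,
   the insn recorded last in LAST_WILD_READ is the one just before the
   first wild read in the block, and that is what gets returned.  */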


/* Scan the insns in BB_INFO backwards from the starting insn to the
   top of the block in order to build the gen and kill sets for the
   block.  The starting insn is either the last insn in the block or
   the insn before the first wild read.  In the latter case we are
   able to skip the rest of the block because it just does not matter:
   anything that happens is hidden by the wild read.  */

static void
dse_step3_scan (bool for_spills, basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];
  insn_info_t insn_info;

  if (for_spills)
    /* There are no wild reads in the spill case.  */
    insn_info = bb_info->last_insn;
  else
    insn_info = find_insn_before_first_wild_read (bb_info);

  /* In the spill case, or in the no_spill case when there is no wild
     read in the block, we will need a kill set.  */
  if (insn_info == bb_info->last_insn)
    {
      if (bb_info->kill)
        bitmap_clear (bb_info->kill);
      else
        bb_info->kill = BITMAP_ALLOC (NULL);
    }
  else
    if (bb_info->kill)
      BITMAP_FREE (bb_info->kill);

  while (insn_info)
    {
      /* There may have been code deleted by the dce pass run before
         this phase.  */
      if (insn_info->insn && INSN_P (insn_info->insn))
        {
          /* Process the read(s) last.  */
          if (for_spills)
            {
              scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
              scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
            }
          else
            {
              scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
              scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
            }
        }

      insn_info = insn_info->prev_insn;
    }
}
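
/* When the scan starts below a wild read, the kill set is freed
   above; dse_transfer_function treats a missing kill set as all ones,
   so no information from this block's successors can reach its
   top.  */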


/* Set the gen set of the exit block, and also any block with no
   successors that does not have a wild read.  */

static void
dse_step3_exit_block_scan (bb_info_t bb_info)
{
  /* The gen set is all 0's for the exit block except for the
     frame_pointer_group.  */

  if (stores_off_frame_dead_at_return)
    {
      unsigned int i;
      group_info_t group;

      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
        {
          if (group->process_globally && group->frame_related)
            bitmap_ior_into (bb_info->gen, group->group_kill);
        }
    }
}


/* Find all of the blocks that are not backwards reachable from the
   exit block or any block with no successors (BB).  These are the
   infinite loops or infinite self loops.  These blocks will still
   have their bits set in UNREACHABLE_BLOCKS.  */

static void
mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (TEST_BIT (unreachable_blocks, bb->index))
    {
      RESET_BIT (unreachable_blocks, bb->index);
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          mark_reachable_blocks (unreachable_blocks, e->src);
        }
    }
}

/* Build the transfer functions for the function.  */

static void
dse_step3 (bool for_spills)
{
  basic_block bb;
  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  sbitmap_ones (unreachable_blocks);

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
        bitmap_clear (bb_info->gen);
      else
        bb_info->gen = BITMAP_ALLOC (NULL);

      if (bb->index == ENTRY_BLOCK)
        ;
      else if (bb->index == EXIT_BLOCK)
        dse_step3_exit_block_scan (bb_info);
      else
        dse_step3_scan (for_spills, bb);
      if (EDGE_COUNT (bb->succs) == 0)
        mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
         sets.  */
      if (bb_info->in)
        BITMAP_FREE (bb_info->in);
      if (bb_info->out)
        BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice. However, it is common in regression tests.  */
  EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
        {
          bb_info_t bb_info = bb_table[i];
          if (!all_ones)
            {
              unsigned int j;
              group_info_t group;

              all_ones = BITMAP_ALLOC (NULL);
              FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
                bitmap_ior_into (all_ones, group->group_kill);
            }
          if (!bb_info->out)
            {
              bb_info->out = BITMAP_ALLOC (NULL);
              bitmap_copy (bb_info->out, all_ones);
            }
        }
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
  sbitmap_free (unreachable_blocks);
}



/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/
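
/* For each basic block BB the equations solved below are

      OUT(BB) = intersection over successors S of IN(S)
      IN(BB)  = GEN(BB) | (OUT(BB) & ~KILL(BB))

   where a missing set stands for "all ones".  */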


/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (NULL);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}

/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */

static bool
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
        bitmap_and_into (src_info->out, dest_info->in);
      else
        {
          src_info->out = BITMAP_ALLOC (NULL);
          bitmap_copy (src_info->out, dest_info->in);
        }
    }
  return true;
}
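
/* Intersecting over the successors implements the requirement that a
   store is dead only if it is dead along every path from the store to
   the exit of the function.  */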


/* Propagate the info from the out to the in set of BB_INDEX's basic
   block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is; none of
   the info can reach the top.  The only thing that reaches the top is
   the gen set and we just copy the set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created and
   it is better to wait than to create a set of ones.

   3) There is both a kill and out set.  We apply the obvious transfer
   function.
*/

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
        {
          /* Case 3 above.  */
          if (bb_info->in)
            return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                         bb_info->out, bb_info->kill);
          else
            {
              bb_info->in = BITMAP_ALLOC (NULL);
              bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                    bb_info->out, bb_info->kill);
              return true;
            }
        }
      else
        /* Case 2 above.  */
        return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
         happens.  */
      if (bb_info->in)
        return false;
      else
        {
          bb_info->in = BITMAP_ALLOC (NULL);
          bitmap_copy (bb_info->in, bb_info->gen);
          return true;
        }
    }
}

/* Solve the dataflow equations.  */

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
                      dse_confluence_n, dse_transfer_function,
                      all_blocks, df_get_postorder (DF_BACKWARD),
                      df_get_n_blocks (DF_BACKWARD));
  if (dump_file)
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB (bb)
        {
          bb_info_t bb_info = bb_table[bb->index];

          df_print_bb_index (bb, dump_file);
          if (bb_info->in)
            bitmap_print (dump_file, bb_info->in, "  in:   ", "\n");
          else
            fprintf (dump_file, "  in:   *MISSING*\n");
          if (bb_info->gen)
            bitmap_print (dump_file, bb_info->gen, "  gen:  ", "\n");
          else
            fprintf (dump_file, "  gen:  *MISSING*\n");
          if (bb_info->kill)
            bitmap_print (dump_file, bb_info->kill, "  kill: ", "\n");
          else
            fprintf (dump_file, "  kill: *MISSING*\n");
          if (bb_info->out)
            bitmap_print (dump_file, bb_info->out, "  out:  ", "\n");
          else
            fprintf (dump_file, "  out:  *MISSING*\n\n");
        }
    }
}



/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/


static void
dse_step5_nospill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          if (dump_file && insn_info->insn)
            {
              fprintf (dump_file, "starting to process insn %d\n",
                       INSN_UID (insn_info->insn));
              bitmap_print (dump_file, v, "  v:  ", "\n");
            }

          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              store_info_t store_info = insn_info->store_rec;

              /* Try to delete the current insn.  */
              deleted = true;

              /* Skip the clobbers.  */
              while (!store_info->is_set)
                store_info = store_info->next;

              if (store_info->alias_set)
                deleted = false;
              else
                {
                  HOST_WIDE_INT i;
                  group_info_t group_info
                    = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);

                  for (i = store_info->begin; i < store_info->end; i++)
                    {
                      int index = get_bitmap_index (group_info, i);

                      if (dump_file)
                        fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          if (dump_file)
                            fprintf (dump_file, "failing at i = %d\n", (int)i);
                          deleted = false;
                          break;
                        }
                    }
                }
              if (deleted)
                {
                  if (dbg_cnt (dse)
                      && check_for_inc_dec_1 (insn_info))
                    {
                      delete_insn (insn_info->insn);
                      insn_info->insn = NULL;
                      globally_deleted++;
                    }
                }
            }
          /* We do not want to process the local info if the insn was
             deleted.  For instance, if the insn did a wild read, we
             no longer need to trash the info.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores_nospill (insn_info->store_rec, v, NULL);
              if (insn_info->wild_read)
                {
                  if (dump_file)
                    fprintf (dump_file, "wild read\n");
                  bitmap_clear (v);
                }
              else if (insn_info->read_rec
                       || insn_info->non_frame_wild_read)
                {
                  if (dump_file && !insn_info->non_frame_wild_read)
                    fprintf (dump_file, "regular read\n");
                  else if (dump_file)
                    fprintf (dump_file, "non-frame wild read\n");
                  scan_reads_nospill (insn_info, v, NULL);
                }
            }

          insn_info = insn_info->prev_insn;
        }
    }
}
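
/* An insn is deleted here only when every byte it stores has a global
   position and that position is still set in V, i.e. the byte is
   overwritten before any read on every path from this store to the
   exit.  */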


static void
dse_step5_spill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              /* Try to delete the current insn.  */
              store_info_t store_info = insn_info->store_rec;
              deleted = true;

              while (store_info)
                {
                  if (store_info->alias_set)
                    {
                      int index = get_bitmap_index (clear_alias_group,
                                                    store_info->alias_set);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          deleted = false;
                          break;
                        }
                    }
                  else
                    deleted = false;
                  store_info = store_info->next;
                }
              if (deleted && dbg_cnt (dse)
                  && check_for_inc_dec_1 (insn_info))
                {
                  if (dump_file)
                    fprintf (dump_file, "Spill deleting insn %d\n",
                             INSN_UID (insn_info->insn));
                  delete_insn (insn_info->insn);
                  spill_deleted++;
                  insn_info->insn = NULL;
                }
            }

          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores_spill (insn_info->store_rec, v, NULL);
              scan_reads_spill (insn_info->read_rec, v, NULL);
            }

          insn_info = insn_info->prev_insn;
        }
    }
}



/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by an earlier store of the same value
   that couldn't itself be eliminated.
----------------------------------------------------------------------------*/

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info_t s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx rinsn = s_info->redundant_reason->insn;
                  if (dump_file)
                    fprintf (dump_file, "Locally deleting insn %d "
                                        "because insn %d stores the "
                                        "same value and couldn't be "
                                        "eliminated\n",
                                        INSN_UID (insn_info->insn),
                                        INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}

/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (bool global_done)
{
  unsigned int i;
  group_info_t group;
  basic_block bb;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      free (group->offset_map_n);
      free (group->offset_map_p);
      BITMAP_FREE (group->store1_n);
      BITMAP_FREE (group->store1_p);
      BITMAP_FREE (group->store2_n);
      BITMAP_FREE (group->store2_p);
      BITMAP_FREE (group->escaped_n);
      BITMAP_FREE (group->escaped_p);
      BITMAP_FREE (group->group_kill);
    }

  if (global_done)
    FOR_ALL_BB (bb)
      {
        bb_info_t bb_info = bb_table[bb->index];
        BITMAP_FREE (bb_info->gen);
        if (bb_info->kill)
          BITMAP_FREE (bb_info->kill);
        if (bb_info->in)
          BITMAP_FREE (bb_info->in);
        if (bb_info->out)
          BITMAP_FREE (bb_info->out);
      }

  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  free (bb_table);
  htab_delete (rtx_group_table);
  VEC_free (group_info_t, heap, rtx_group_vec);
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);
  BITMAP_FREE (kill_on_calls);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}


/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  bool did_global = false;

  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      did_global = true;
      if (dump_file)
        fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  /* For the instance of dse that runs after reload, we make a special
     pass to process the spills.  These are special in that they are
     totally transparent, i.e., there are no aliasing issues that need
     to be considered.  This means that the wild reads that kill
     everything else do not apply here.  */
  if (clear_alias_sets && dse_step2_spill ())
    {
      if (!did_global)
        {
          df_set_flags (DF_LR_RUN_DCE);
          df_analyze ();
        }
      did_global = true;
      if (dump_file)
        fprintf (dump_file, "doing global spill processing\n");
      dse_step3 (true);
      dse_step4 ();
      dse_step5_spill ();
    }

  dse_step6 ();
  dse_step7 (did_global);

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
             locally_deleted, globally_deleted, spill_deleted);
  return 0;
}

static bool
gate_dse (void)
{
  return gate_dse1 () || gate_dse2 ();
}

static bool
gate_dse1 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse1);
}

static bool
gate_dse2 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse2);
}

struct rtl_opt_pass pass_rtl_dse1 =
{
 {
  RTL_PASS,
  "dse1",                               /* name */
  gate_dse1,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE1,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_rtl_dse2 =
{
 {
  RTL_PASS,
  "dse2",                               /* name */
  gate_dse2,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
