OpenCores
URL https://opencores.org/ocsvn/openrisc_2011-10-31/openrisc_2011-10-31/trunk

Subversion Repositories openrisc_2011-10-31

[/] [openrisc/] [trunk/] [gnu-src/] [gcc-4.2.2/] [gcc/] [cse.c] - Blame information for rev 293

Go to most recent revision | Details | Compare with Previous | View Log

Line No. Rev Author Line
1 38 julius
/* Common subexpression elimination for GNU compiler.
2
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
4
 
5
This file is part of GCC.
6
 
7
GCC is free software; you can redistribute it and/or modify it under
8
the terms of the GNU General Public License as published by the Free
9
Software Foundation; either version 3, or (at your option) any later
10
version.
11
 
12
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13
WARRANTY; without even the implied warranty of MERCHANTABILITY or
14
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15
for more details.
16
 
17
You should have received a copy of the GNU General Public License
18
along with GCC; see the file COPYING3.  If not see
19
<http://www.gnu.org/licenses/>.  */
20
 
21
#include "config.h"
22
/* stdio.h must precede rtl.h for FFS.  */
23
#include "system.h"
24
#include "coretypes.h"
25
#include "tm.h"
26
#include "rtl.h"
27
#include "tm_p.h"
28
#include "hard-reg-set.h"
29
#include "regs.h"
30
#include "basic-block.h"
31
#include "flags.h"
32
#include "real.h"
33
#include "insn-config.h"
34
#include "recog.h"
35
#include "function.h"
36
#include "expr.h"
37
#include "toplev.h"
38
#include "output.h"
39
#include "ggc.h"
40
#include "timevar.h"
41
#include "except.h"
42
#include "target.h"
43
#include "params.h"
44
#include "rtlhooks-def.h"
45
#include "tree-pass.h"
46
 
47
/* The basic idea of common subexpression elimination is to go
48
   through the code, keeping a record of expressions that would
49
   have the same value at the current scan point, and replacing
50
   expressions encountered with the cheapest equivalent expression.
51
 
52
   It is too complicated to keep track of the different possibilities
53
   when control paths merge in this code; so, at each label, we forget all
54
   that is known and start fresh.  This can be described as processing each
55
   extended basic block separately.  We have a separate pass to perform
56
   global CSE.
57
 
58
   Note CSE can turn a conditional or computed jump into a nop or
59
   an unconditional jump.  When this occurs we arrange to run the jump
60
   optimizer after CSE to delete the unreachable code.
61
 
62
   We use two data structures to record the equivalent expressions:
63
   a hash table for most expressions, and a vector of "quantity
64
   numbers" to record equivalent (pseudo) registers.
65
 
66
   The use of the special data structure for registers is desirable
67
   because it is faster.  It is possible because registers references
68
   contain a fairly small number, the register number, taken from
69
   a contiguously allocated series, and two register references are
70
   identical if they have the same number.  General expressions
71
   do not have any such thing, so the only way to retrieve the
72
   information recorded on an expression other than a register
73
   is to keep it in a hash table.
74
 
75
Registers and "quantity numbers":
76
 
77
   At the start of each basic block, all of the (hardware and pseudo)
78
   registers used in the function are given distinct quantity
79
   numbers to indicate their contents.  During scan, when the code
80
   copies one register into another, we copy the quantity number.
81
   When a register is loaded in any other way, we allocate a new
82
   quantity number to describe the value generated by this operation.
83
   `REG_QTY (N)' records what quantity register N is currently thought
84
   of as containing.
85
 
86
   All real quantity numbers are greater than or equal to zero.
87
   If register N has not been assigned a quantity, `REG_QTY (N)' will
88
   equal -N - 1, which is always negative.
89
 
90
   Quantity numbers below zero do not exist and none of the `qty_table'
91
   entries should be referenced with a negative index.
92
 
93
   We also maintain a bidirectional chain of registers for each
94
   quantity number.  The `qty_table` members `first_reg' and `last_reg',
95
   and `reg_eqv_table' members `next' and `prev' hold these chains.
96
 
97
   The first register in a chain is the one whose lifespan is least local.
98
   Among equals, it is the one that was seen first.
99
   We replace any equivalent register with that one.
100
 
101
   If two registers have the same quantity number, it must be true that
102
   REG expressions with qty_table `mode' must be in the hash table for both
103
   registers and must be in the same class.
104
 
105
   The converse is not true.  Since hard registers may be referenced in
106
   any mode, two REG expressions might be equivalent in the hash table
107
   but not have the same quantity number if the quantity number of one
108
   of the registers is not the same mode as those expressions.
109
 
110
Constants and quantity numbers
111
 
112
   When a quantity has a known constant value, that value is stored
113
   in the appropriate qty_table `const_rtx'.  This is in addition to
114
   putting the constant in the hash table as is usual for non-regs.
115
 
116
   Whether a reg or a constant is preferred is determined by the configuration
117
   macro CONST_COSTS and will often depend on the constant value.  In any
118
   event, expressions containing constants can be simplified, by fold_rtx.
119
 
120
   When a quantity has a known nearly constant value (such as an address
121
   of a stack slot), that value is stored in the appropriate qty_table
122
   `const_rtx'.
123
 
124
   Integer constants don't have a machine mode.  However, cse
125
   determines the intended machine mode from the destination
126
   of the instruction that moves the constant.  The machine mode
127
   is recorded in the hash table along with the actual RTL
128
   constant expression so that different modes are kept separate.
129
 
130
Other expressions:
131
 
132
   To record known equivalences among expressions in general
133
   we use a hash table called `table'.  It has a fixed number of buckets
134
   that contain chains of `struct table_elt' elements for expressions.
135
   These chains connect the elements whose expressions have the same
136
   hash codes.
137
 
138
   Other chains through the same elements connect the elements which
139
   currently have equivalent values.
140
 
141
   Register references in an expression are canonicalized before hashing
142
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
143
   The hash code of a register reference is computed using the quantity
144
   number, not the register number.
145
 
146
   When the value of an expression changes, it is necessary to remove from the
147
   hash table not just that expression but all expressions whose values
148
   could be different as a result.
149
 
150
     1. If the value changing is in memory, except in special cases
151
     ANYTHING referring to memory could be changed.  That is because
152
     nobody knows where a pointer does not point.
153
     The function `invalidate_memory' removes what is necessary.
154
 
155
     The special cases are when the address is constant or is
156
     a constant plus a fixed register such as the frame pointer
157
     or a static chain pointer.  When such addresses are stored in,
158
     we can tell exactly which other such addresses must be invalidated
159
     due to overlap.  `invalidate' does this.
160
     All expressions that refer to non-constant
161
     memory addresses are also invalidated.  `invalidate_memory' does this.
162
 
163
     2. If the value changing is a register, all expressions
164
     containing references to that register, and only those,
165
     must be removed.
166
 
167
   Because searching the entire hash table for expressions that contain
168
   a register is very slow, we try to figure out when it isn't necessary.
169
   Precisely, this is necessary only when expressions have been
170
   entered in the hash table using this register, and then the value has
171
   changed, and then another expression wants to be added to refer to
172
   the register's new value.  This sequence of circumstances is rare
173
   within any one basic block.
174
 
175
   `REG_TICK' and `REG_IN_TABLE', accessors for members of
176
   cse_reg_info, are used to detect this case.  REG_TICK (i) is
177
   incremented whenever a value is stored in register i.
178
   REG_IN_TABLE (i) holds -1 if no references to register i have been
179
   entered in the table; otherwise, it contains the value REG_TICK (i)
180
   had when the references were entered.  If we want to enter a
181
   reference and REG_IN_TABLE (i) != REG_TICK (i), we must scan and
182
   remove old references.  Until we want to enter a new entry, the
183
   mere fact that the two vectors don't match makes the entries be
184
   ignored if anyone tries to match them.
185
 
186
   Registers themselves are entered in the hash table as well as in
187
   the equivalent-register chains.  However, `REG_TICK' and
188
   `REG_IN_TABLE' do not apply to expressions which are simple
189
   register references.  These expressions are removed from the table
190
   immediately when they become invalid, and this can be done even if
191
   we do not immediately search for all the expressions that refer to
192
   the register.
193
 
194
   A CLOBBER rtx in an instruction invalidates its operand for further
195
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
196
   invalidates everything that resides in memory.
197
 
198
Related expressions:
199
 
200
   Constant expressions that differ only by an additive integer
201
   are called related.  When a constant expression is put in
202
   the table, the related expression with no constant term
203
   is also entered.  These are made to point at each other
204
   so that it is possible to find out if there exists any
205
   register equivalent to an expression related to a given expression.  */
206
 
207
/* Length of qty_table vector.  We know in advance we will not need
208
   a quantity number this big.  */
209
 
210
static int max_qty;
211
 
212
/* Next quantity number to be allocated.
213
   This is 1 + the largest number needed so far.  */
214
 
215
static int next_qty;
216
 
217
/* Per-qty information tracking.
218
 
219
   `first_reg' and `last_reg' track the head and tail of the
220
   chain of registers which currently contain this quantity.
221
 
222
   `mode' contains the machine mode of this quantity.
223
 
224
   `const_rtx' holds the rtx of the constant value of this
225
   quantity, if known.  A summations of the frame/arg pointer
226
   and a constant can also be entered here.  When this holds
227
   a known value, `const_insn' is the insn which stored the
228
   constant value.
229
 
230
   `comparison_{code,const,qty}' are used to track when a
231
   comparison between a quantity and some constant or register has
232
   been passed.  In such a case, we know the results of the comparison
233
   in case we see it again.  These members record a comparison that
234
   is known to be true.  `comparison_code' holds the rtx code of such
235
   a comparison, else it is set to UNKNOWN and the other two
236
   comparison members are undefined.  `comparison_const' holds
237
   the constant being compared against, or zero if the comparison
238
   is not against a constant.  `comparison_qty' holds the quantity
239
   being compared against when the result is known.  If the comparison
240
   is not with a register, `comparison_qty' is -1.  */
241
 
242
struct qty_table_elem
243
{
244
  rtx const_rtx;
245
  rtx const_insn;
246
  rtx comparison_const;
247
  int comparison_qty;
248
  unsigned int first_reg, last_reg;
249
  /* The sizes of these fields should match the sizes of the
250
     code and mode fields of struct rtx_def (see rtl.h).  */
251
  ENUM_BITFIELD(rtx_code) comparison_code : 16;
252
  ENUM_BITFIELD(machine_mode) mode : 8;
253
};
254
 
255
/* The table of all qtys, indexed by qty number.  */
256
static struct qty_table_elem *qty_table;
257
 
258
/* Structure used to pass arguments via for_each_rtx to function
259
   cse_change_cc_mode.  */
260
struct change_cc_mode_args
261
{
262
  rtx insn;
263
  rtx newreg;
264
};
265
 
266
#ifdef HAVE_cc0
267
/* For machines that have a CC0, we do not record its value in the hash
268
   table since its use is guaranteed to be the insn immediately following
269
   its definition and any other insn is presumed to invalidate it.
270
 
271
   Instead, we store below the value last assigned to CC0.  If it should
272
   happen to be a constant, it is stored in preference to the actual
273
   assigned value.  In case it is a constant, we store the mode in which
274
   the constant should be interpreted.  */
275
 
276
static rtx prev_insn_cc0;
277
static enum machine_mode prev_insn_cc0_mode;
278
 
279
/* Previous actual insn.  0 if at first insn of basic block.  */
280
 
281
static rtx prev_insn;
282
#endif
283
 
284
/* Insn being scanned.  */
285
 
286
static rtx this_insn;
287
 
288
/* Index by register number, gives the number of the next (or
289
   previous) register in the chain of registers sharing the same
290
   value.
291
 
292
   Or -1 if this register is at the end of the chain.
293
 
294
   If REG_QTY (N) == -N - 1, reg_eqv_table[N].next is undefined.  */
295
 
296
/* Per-register equivalence chain.  */
297
struct reg_eqv_elem
298
{
299
  int next, prev;
300
};
301
 
302
/* The table of all register equivalence chains.  */
303
static struct reg_eqv_elem *reg_eqv_table;
304
 
305
struct cse_reg_info
306
{
307
  /* The timestamp at which this register is initialized.  */
308
  unsigned int timestamp;
309
 
310
  /* The quantity number of the register's current contents.  */
311
  int reg_qty;
312
 
313
  /* The number of times the register has been altered in the current
314
     basic block.  */
315
  int reg_tick;
316
 
317
  /* The REG_TICK value at which rtx's containing this register are
318
     valid in the hash table.  If this does not equal the current
319
     reg_tick value, such expressions existing in the hash table are
320
     invalid.  */
321
  int reg_in_table;
322
 
323
  /* The SUBREG that was set when REG_TICK was last incremented.  Set
324
     to -1 if the last store was to the whole register, not a subreg.  */
325
  unsigned int subreg_ticked;
326
};
327
 
328
/* A table of cse_reg_info indexed by register numbers.  */
329
static struct cse_reg_info *cse_reg_info_table;
330
 
331
/* The size of the above table.  */
332
static unsigned int cse_reg_info_table_size;
333
 
334
/* The index of the first entry that has not been initialized.  */
335
static unsigned int cse_reg_info_table_first_uninitialized;
336
 
337
/* The timestamp at the beginning of the current run of
338
   cse_basic_block.  We increment this variable at the beginning of
339
   the current run of cse_basic_block.  The timestamp field of a
340
   cse_reg_info entry matches the value of this variable if and only
341
   if the entry has been initialized during the current run of
342
   cse_basic_block.  */
343
static unsigned int cse_reg_info_timestamp;
344
 
345
/* A HARD_REG_SET containing all the hard registers for which there is
346
   currently a REG expression in the hash table.  Note the difference
347
   from the above variables, which indicate if the REG is mentioned in some
348
   expression in the table.  */
349
 
350
static HARD_REG_SET hard_regs_in_table;
351
 
352
/* CUID of insn that starts the basic block currently being cse-processed.  */
353
 
354
static int cse_basic_block_start;
355
 
356
/* CUID of insn that ends the basic block currently being cse-processed.  */
357
 
358
static int cse_basic_block_end;
359
 
360
/* Vector mapping INSN_UIDs to cuids.
361
   The cuids are like uids but increase monotonically always.
362
   We use them to see whether a reg is used outside a given basic block.  */
363
 
364
static int *uid_cuid;
365
 
366
/* Highest UID in UID_CUID.  */
367
static int max_uid;
368
 
369
/* Get the cuid of an insn.  */
370
 
371
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
372
 
373
/* Nonzero if this pass has made changes, and therefore it's
374
   worthwhile to run the garbage collector.  */
375
 
376
static int cse_altered;
377
 
378
/* Nonzero if cse has altered conditional jump insns
379
   in such a way that jump optimization should be redone.  */
380
 
381
static int cse_jumps_altered;
382
 
383
/* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
384
   REG_LABEL, we have to rerun jump after CSE to put in the note.  */
385
static int recorded_label_ref;
386
 
387
/* canon_hash stores 1 in do_not_record
388
   if it notices a reference to CC0, PC, or some other volatile
389
   subexpression.  */
390
 
391
static int do_not_record;
392
 
393
/* canon_hash stores 1 in hash_arg_in_memory
394
   if it notices a reference to memory within the expression being hashed.  */
395
 
396
static int hash_arg_in_memory;
397
 
398
/* The hash table contains buckets which are chains of `struct table_elt's,
399
   each recording one expression's information.
400
   That expression is in the `exp' field.
401
 
402
   The canon_exp field contains a canonical (from the point of view of
403
   alias analysis) version of the `exp' field.
404
 
405
   Those elements with the same hash code are chained in both directions
406
   through the `next_same_hash' and `prev_same_hash' fields.
407
 
408
   Each set of expressions with equivalent values
409
   are on a two-way chain through the `next_same_value'
410
   and `prev_same_value' fields, and all point with
411
   the `first_same_value' field at the first element in
412
   that chain.  The chain is in order of increasing cost.
413
   Each element's cost value is in its `cost' field.
414
 
415
   The `in_memory' field is nonzero for elements that
416
   involve any reference to memory.  These elements are removed
417
   whenever a write is done to an unidentified location in memory.
418
   To be safe, we assume that a memory address is unidentified unless
419
   the address is either a symbol constant or a constant plus
420
   the frame pointer or argument pointer.
421
 
422
   The `related_value' field is used to connect related expressions
423
   (that differ by adding an integer).
424
   The related expressions are chained in a circular fashion.
425
   `related_value' is zero for expressions for which this
426
   chain is not useful.
427
 
428
   The `cost' field stores the cost of this element's expression.
429
   The `regcost' field stores the value returned by approx_reg_cost for
430
   this element's expression.
431
 
432
   The `is_const' flag is set if the element is a constant (including
433
   a fixed address).
434
 
435
   The `flag' field is used as a temporary during some search routines.
436
 
437
   The `mode' field is usually the same as GET_MODE (`exp'), but
438
   if `exp' is a CONST_INT and has no machine mode then the `mode'
439
   field is the mode it was being used as.  Each constant is
440
   recorded separately for each mode it is used with.  */
441
 
442
struct table_elt
443
{
444
  rtx exp;
445
  rtx canon_exp;
446
  struct table_elt *next_same_hash;
447
  struct table_elt *prev_same_hash;
448
  struct table_elt *next_same_value;
449
  struct table_elt *prev_same_value;
450
  struct table_elt *first_same_value;
451
  struct table_elt *related_value;
452
  int cost;
453
  int regcost;
454
  /* The size of this field should match the size
455
     of the mode field of struct rtx_def (see rtl.h).  */
456
  ENUM_BITFIELD(machine_mode) mode : 8;
457
  char in_memory;
458
  char is_const;
459
  char flag;
460
};
461
 
462
/* We don't want a lot of buckets, because we rarely have very many
463
   things stored in the hash table, and a lot of buckets slows
464
   down a lot of loops that happen frequently.  */
465
#define HASH_SHIFT      5
466
#define HASH_SIZE       (1 << HASH_SHIFT)
467
#define HASH_MASK       (HASH_SIZE - 1)
468
 
469
/* Compute hash code of X in mode M.  Special-case case where X is a pseudo
470
   register (hard registers may require `do_not_record' to be set).  */
471
 
472
#define HASH(X, M)      \
473
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER      \
474
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))    \
475
  : canon_hash (X, M)) & HASH_MASK)
476
 
477
/* Like HASH, but without side-effects.  */
478
#define SAFE_HASH(X, M) \
479
 ((REG_P (X) && REGNO (X) >= FIRST_PSEUDO_REGISTER      \
480
  ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))    \
481
  : safe_hash (X, M)) & HASH_MASK)
482
 
483
/* Determine whether register number N is considered a fixed register for the
484
   purpose of approximating register costs.
485
   It is desirable to replace other regs with fixed regs, to reduce need for
486
   non-fixed hard regs.
487
   A reg wins if it is either the frame pointer or designated as fixed.  */
488
#define FIXED_REGNO_P(N)  \
489
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
490
   || fixed_regs[N] || global_regs[N])
491
 
492
/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
493
   hard registers and pointers into the frame are the cheapest with a cost
494
   of 0.  Next come pseudos with a cost of one and other hard registers with
495
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */
496
 
497
#define CHEAP_REGNO(N)                                                  \
498
  (REGNO_PTR_FRAME_P(N)                                                 \
499
   || (HARD_REGISTER_NUM_P (N)                                          \
500
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
501
 
502
#define COST(X) (REG_P (X) ? 0 : notreg_cost (X, SET))
503
#define COST_IN(X,OUTER) (REG_P (X) ? 0 : notreg_cost (X, OUTER))
504
 
505
/* Get the number of times this register has been updated in this
506
   basic block.  */
507
 
508
#define REG_TICK(N) (get_cse_reg_info (N)->reg_tick)
509
 
510
/* Get the point at which REG was recorded in the table.  */
511
 
512
#define REG_IN_TABLE(N) (get_cse_reg_info (N)->reg_in_table)
513
 
514
/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
515
   SUBREG).  */
516
 
517
#define SUBREG_TICKED(N) (get_cse_reg_info (N)->subreg_ticked)
518
 
519
/* Get the quantity number for REG.  */
520
 
521
#define REG_QTY(N) (get_cse_reg_info (N)->reg_qty)
522
 
523
/* Determine if the quantity number for register X represents a valid index
524
   into the qty_table.  */
525
 
526
#define REGNO_QTY_VALID_P(N) (REG_QTY (N) >= 0)
527
 
528
static struct table_elt *table[HASH_SIZE];
529
 
530
/* Number of elements in the hash table.  */
531
 
532
static unsigned int table_size;
533
 
534
/* Chain of `struct table_elt's made so far for this function
535
   but currently removed from the table.  */
536
 
537
static struct table_elt *free_element_chain;
538
 
539
/* Set to the cost of a constant pool reference if one was found for a
540
   symbolic constant.  If this was found, it means we should try to
541
   convert constants into constant pool entries if they don't fit in
542
   the insn.  */
543
 
544
static int constant_pool_entries_cost;
545
static int constant_pool_entries_regcost;
546
 
547
/* This data describes a block that will be processed by cse_basic_block.  */
548
 
549
struct cse_basic_block_data
550
{
551
  /* Lowest CUID value of insns in block.  */
552
  int low_cuid;
553
  /* Highest CUID value of insns in block.  */
554
  int high_cuid;
555
  /* Total number of SETs in block.  */
556
  int nsets;
557
  /* Last insn in the block.  */
558
  rtx last;
559
  /* Size of current branch path, if any.  */
560
  int path_size;
561
  /* Current branch path, indicating which branches will be taken.  */
562
  struct branch_path
563
    {
564
      /* The branch insn.  */
565
      rtx branch;
566
      /* Whether it should be taken or not.  AROUND is the same as taken
567
         except that it is used when the destination label is not preceded
568
       by a BARRIER.  */
569
      enum taken {PATH_TAKEN, PATH_NOT_TAKEN, PATH_AROUND} status;
570
    } *path;
571
};
572
 
573
static bool fixed_base_plus_p (rtx x);
574
static int notreg_cost (rtx, enum rtx_code);
575
static int approx_reg_cost_1 (rtx *, void *);
576
static int approx_reg_cost (rtx);
577
static int preferable (int, int, int, int);
578
static void new_basic_block (void);
579
static void make_new_qty (unsigned int, enum machine_mode);
580
static void make_regs_eqv (unsigned int, unsigned int);
581
static void delete_reg_equiv (unsigned int);
582
static int mention_regs (rtx);
583
static int insert_regs (rtx, struct table_elt *, int);
584
static void remove_from_table (struct table_elt *, unsigned);
585
static struct table_elt *lookup (rtx, unsigned, enum machine_mode);
586
static struct table_elt *lookup_for_remove (rtx, unsigned, enum machine_mode);
587
static rtx lookup_as_function (rtx, enum rtx_code);
588
static struct table_elt *insert (rtx, struct table_elt *, unsigned,
589
                                 enum machine_mode);
590
static void merge_equiv_classes (struct table_elt *, struct table_elt *);
591
static void invalidate (rtx, enum machine_mode);
592
static int cse_rtx_varies_p (rtx, int);
593
static void remove_invalid_refs (unsigned int);
594
static void remove_invalid_subreg_refs (unsigned int, unsigned int,
595
                                        enum machine_mode);
596
static void rehash_using_reg (rtx);
597
static void invalidate_memory (void);
598
static void invalidate_for_call (void);
599
static rtx use_related_value (rtx, struct table_elt *);
600
 
601
static inline unsigned canon_hash (rtx, enum machine_mode);
602
static inline unsigned safe_hash (rtx, enum machine_mode);
603
static unsigned hash_rtx_string (const char *);
604
 
605
static rtx canon_reg (rtx, rtx);
606
static void find_best_addr (rtx, rtx *, enum machine_mode);
607
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
608
                                           enum machine_mode *,
609
                                           enum machine_mode *);
610
static rtx fold_rtx (rtx, rtx);
611
static rtx equiv_constant (rtx);
612
static void record_jump_equiv (rtx, int);
613
static void record_jump_cond (enum rtx_code, enum machine_mode, rtx, rtx,
614
                              int);
615
static void cse_insn (rtx, rtx);
616
static void cse_end_of_basic_block (rtx, struct cse_basic_block_data *,
617
                                    int, int);
618
static int addr_affects_sp_p (rtx);
619
static void invalidate_from_clobbers (rtx);
620
static rtx cse_process_notes (rtx, rtx);
621
static void invalidate_skipped_set (rtx, rtx, void *);
622
static void invalidate_skipped_block (rtx);
623
static rtx cse_basic_block (rtx, rtx, struct branch_path *);
624
static void count_reg_usage (rtx, int *, rtx, int);
625
static int check_for_label_ref (rtx *, void *);
626
extern void dump_class (struct table_elt*);
627
static void get_cse_reg_info_1 (unsigned int regno);
628
static struct cse_reg_info * get_cse_reg_info (unsigned int regno);
629
static int check_dependence (rtx *, void *);
630
 
631
static void flush_hash_table (void);
632
static bool insn_live_p (rtx, int *);
633
static bool set_live_p (rtx, rtx, int *);
634
static bool dead_libcall_p (rtx, int *);
635
static int cse_change_cc_mode (rtx *, void *);
636
static void cse_change_cc_mode_insn (rtx, rtx);
637
static void cse_change_cc_mode_insns (rtx, rtx, rtx);
638
static enum machine_mode cse_cc_succs (basic_block, rtx, rtx, bool);
639
 
640
 
641
#undef RTL_HOOKS_GEN_LOWPART
642
#define RTL_HOOKS_GEN_LOWPART           gen_lowpart_if_possible
643
 
644
static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
645
 
646
/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
647
   virtual regs here because the simplify_*_operation routines are called
648
   by integrate.c, which is called before virtual register instantiation.  */
649
 
650
static bool
651
fixed_base_plus_p (rtx x)
652
{
653
  switch (GET_CODE (x))
654
    {
655
    case REG:
656
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
657
        return true;
658
      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
659
        return true;
660
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
661
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
662
        return true;
663
      return false;
664
 
665
    case PLUS:
666
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
667
        return false;
668
      return fixed_base_plus_p (XEXP (x, 0));
669
 
670
    default:
671
      return false;
672
    }
673
}
674
 
675
/* Dump the expressions in the equivalence class indicated by CLASSP.
676
   This function is used only for debugging.  */
677
void
678
dump_class (struct table_elt *classp)
679
{
680
  struct table_elt *elt;
681
 
682
  fprintf (stderr, "Equivalence chain for ");
683
  print_rtl (stderr, classp->exp);
684
  fprintf (stderr, ": \n");
685
 
686
  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
687
    {
688
      print_rtl (stderr, elt->exp);
689
      fprintf (stderr, "\n");
690
    }
691
}
692
 
693
/* Subroutine of approx_reg_cost; called through for_each_rtx.  */
694
 
695
static int
696
approx_reg_cost_1 (rtx *xp, void *data)
697
{
698
  rtx x = *xp;
699
  int *cost_p = data;
700
 
701
  if (x && REG_P (x))
702
    {
703
      unsigned int regno = REGNO (x);
704
 
705
      if (! CHEAP_REGNO (regno))
706
        {
707
          if (regno < FIRST_PSEUDO_REGISTER)
708
            {
709
              if (SMALL_REGISTER_CLASSES)
710
                return 1;
711
              *cost_p += 2;
712
            }
713
          else
714
            *cost_p += 1;
715
        }
716
    }
717
 
718
  return 0;
719
}
720
 
721
/* Return an estimate of the cost of the registers used in an rtx.
722
   This is mostly the number of different REG expressions in the rtx;
723
   however for some exceptions like fixed registers we use a cost of
724
   0.  If any other hard register reference occurs, return MAX_COST.  */
725
 
726
static int
727
approx_reg_cost (rtx x)
728
{
729
  int cost = 0;
730
 
731
  if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
732
    return MAX_COST;
733
 
734
  return cost;
735
}
736
 
737
/* Returns a canonical version of X for the address, from the point of view,
738
   that all multiplications are represented as MULT instead of the multiply
739
   by a power of 2 being represented as ASHIFT.  */
740
 
741
static rtx
742
canon_for_address (rtx x)
743
{
744
  enum rtx_code code;
745
  enum machine_mode mode;
746
  rtx new = 0;
747
  int i;
748
  const char *fmt;
749
 
750
  if (!x)
751
    return x;
752
 
753
  code = GET_CODE (x);
754
  mode = GET_MODE (x);
755
 
756
  switch (code)
757
    {
758
    case ASHIFT:
759
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
760
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)
761
          && INTVAL (XEXP (x, 1)) >= 0)
762
        {
763
          new = canon_for_address (XEXP (x, 0));
764
          new = gen_rtx_MULT (mode, new,
765
                              gen_int_mode ((HOST_WIDE_INT) 1
766
                                            << INTVAL (XEXP (x, 1)),
767
                                            mode));
768
        }
769
      break;
770
    default:
771
      break;
772
 
773
    }
774
  if (new)
775
    return new;
776
 
777
  /* Now recursively process each operand of this operation.  */
778
  fmt = GET_RTX_FORMAT (code);
779
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
780
    if (fmt[i] == 'e')
781
      {
782
        new = canon_for_address (XEXP (x, i));
783
        XEXP (x, i) = new;
784
      }
785
  return x;
786
}
787
 
788
/* Return a negative value if an rtx A, whose costs are given by COST_A
789
   and REGCOST_A, is more desirable than an rtx B.
790
   Return a positive value if A is less desirable, or 0 if the two are
791
   equally good.  */
792
static int
793
preferable (int cost_a, int regcost_a, int cost_b, int regcost_b)
794
{
795
  /* First, get rid of cases involving expressions that are entirely
796
     unwanted.  */
797
  if (cost_a != cost_b)
798
    {
799
      if (cost_a == MAX_COST)
800
        return 1;
801
      if (cost_b == MAX_COST)
802
        return -1;
803
    }
804
 
805
  /* Avoid extending lifetimes of hardregs.  */
806
  if (regcost_a != regcost_b)
807
    {
808
      if (regcost_a == MAX_COST)
809
        return 1;
810
      if (regcost_b == MAX_COST)
811
        return -1;
812
    }
813
 
814
  /* Normal operation costs take precedence.  */
815
  if (cost_a != cost_b)
816
    return cost_a - cost_b;
817
  /* Only if these are identical consider effects on register pressure.  */
818
  if (regcost_a != regcost_b)
819
    return regcost_a - regcost_b;
820
  return 0;
821
}
822
 
823
/* Internal function, to compute cost when X is not a register; called
824
   from COST macro to keep it simple.  */
825
 
826
static int
827
notreg_cost (rtx x, enum rtx_code outer)
828
{
829
  return ((GET_CODE (x) == SUBREG
830
           && REG_P (SUBREG_REG (x))
831
           && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
832
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
833
           && (GET_MODE_SIZE (GET_MODE (x))
834
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
835
           && subreg_lowpart_p (x)
836
           && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
837
                                     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
838
          ? 0
839
          : rtx_cost (x, outer) * 2);
840
}
841
 
842
 
843
/* Initialize CSE_REG_INFO_TABLE.  */
844
 
845
static void
846
init_cse_reg_info (unsigned int nregs)
847
{
848
  /* Do we need to grow the table?  */
849
  if (nregs > cse_reg_info_table_size)
850
    {
851
      unsigned int new_size;
852
 
853
      if (cse_reg_info_table_size < 2048)
854
        {
855
          /* Compute a new size that is a power of 2 and no smaller
856
             than the large of NREGS and 64.  */
857
          new_size = (cse_reg_info_table_size
858
                      ? cse_reg_info_table_size : 64);
859
 
860
          while (new_size < nregs)
861
            new_size *= 2;
862
        }
863
      else
864
        {
865
          /* If we need a big table, allocate just enough to hold
866
             NREGS registers.  */
867
          new_size = nregs;
868
        }
869
 
870
      /* Reallocate the table with NEW_SIZE entries.  */
871
      if (cse_reg_info_table)
872
        free (cse_reg_info_table);
873
      cse_reg_info_table = XNEWVEC (struct cse_reg_info, new_size);
874
      cse_reg_info_table_size = new_size;
875
      cse_reg_info_table_first_uninitialized = 0;
876
    }
877
 
878
  /* Do we have all of the first NREGS entries initialized?  */
879
  if (cse_reg_info_table_first_uninitialized < nregs)
880
    {
881
      unsigned int old_timestamp = cse_reg_info_timestamp - 1;
882
      unsigned int i;
883
 
884
      /* Put the old timestamp on newly allocated entries so that they
885
         will all be considered out of date.  We do not touch those
886
         entries beyond the first NREGS entries to be nice to the
887
         virtual memory.  */
888
      for (i = cse_reg_info_table_first_uninitialized; i < nregs; i++)
889
        cse_reg_info_table[i].timestamp = old_timestamp;
890
 
891
      cse_reg_info_table_first_uninitialized = nregs;
892
    }
893
}
894
 
895
/* Given REGNO, initialize the cse_reg_info entry for REGNO.  */
896
 
897
static void
898
get_cse_reg_info_1 (unsigned int regno)
899
{
900
  /* Set TIMESTAMP field to CSE_REG_INFO_TIMESTAMP so that this
901
     entry will be considered to have been initialized.  */
902
  cse_reg_info_table[regno].timestamp = cse_reg_info_timestamp;
903
 
904
  /* Initialize the rest of the entry.  */
905
  cse_reg_info_table[regno].reg_tick = 1;
906
  cse_reg_info_table[regno].reg_in_table = -1;
907
  cse_reg_info_table[regno].subreg_ticked = -1;
908
  cse_reg_info_table[regno].reg_qty = -regno - 1;
909
}
910
 
911
/* Find a cse_reg_info entry for REGNO.  */
912
 
913
static inline struct cse_reg_info *
914
get_cse_reg_info (unsigned int regno)
915
{
916
  struct cse_reg_info *p = &cse_reg_info_table[regno];
917
 
918
  /* If this entry has not been initialized, go ahead and initialize
919
     it.  */
920
  if (p->timestamp != cse_reg_info_timestamp)
921
    get_cse_reg_info_1 (regno);
922
 
923
  return p;
924
}
925
 
926
/* Clear the hash table and initialize each register with its own quantity,
927
   for a new basic block.  */
928
 
929
static void
930
new_basic_block (void)
931
{
932
  int i;
933
 
934
  next_qty = 0;
935
 
936
  /* Invalidate cse_reg_info_table.  */
937
  cse_reg_info_timestamp++;
938
 
939
  /* Clear out hash table state for this pass.  */
940
  CLEAR_HARD_REG_SET (hard_regs_in_table);
941
 
942
  /* The per-quantity values used to be initialized here, but it is
943
     much faster to initialize each as it is made in `make_new_qty'.  */
944
 
945
  for (i = 0; i < HASH_SIZE; i++)
946
    {
947
      struct table_elt *first;
948
 
949
      first = table[i];
950
      if (first != NULL)
951
        {
952
          struct table_elt *last = first;
953
 
954
          table[i] = NULL;
955
 
956
          while (last->next_same_hash != NULL)
957
            last = last->next_same_hash;
958
 
959
          /* Now relink this hash entire chain into
960
             the free element list.  */
961
 
962
          last->next_same_hash = free_element_chain;
963
          free_element_chain = first;
964
        }
965
    }
966
 
967
  table_size = 0;
968
 
969
#ifdef HAVE_cc0
970
  prev_insn = 0;
971
  prev_insn_cc0 = 0;
972
#endif
973
}
974
 
975
/* Say that register REG contains a quantity in mode MODE not in any
976
   register before and initialize that quantity.  */
977
 
978
static void
979
make_new_qty (unsigned int reg, enum machine_mode mode)
980
{
981
  int q;
982
  struct qty_table_elem *ent;
983
  struct reg_eqv_elem *eqv;
984
 
985
  gcc_assert (next_qty < max_qty);
986
 
987
  q = REG_QTY (reg) = next_qty++;
988
  ent = &qty_table[q];
989
  ent->first_reg = reg;
990
  ent->last_reg = reg;
991
  ent->mode = mode;
992
  ent->const_rtx = ent->const_insn = NULL_RTX;
993
  ent->comparison_code = UNKNOWN;
994
 
995
  eqv = &reg_eqv_table[reg];
996
  eqv->next = eqv->prev = -1;
997
}
998
 
999
/* Make reg NEW equivalent to reg OLD.
1000
   OLD is not changing; NEW is.  */
1001
 
1002
static void
1003
make_regs_eqv (unsigned int new, unsigned int old)
1004
{
1005
  unsigned int lastr, firstr;
1006
  int q = REG_QTY (old);
1007
  struct qty_table_elem *ent;
1008
 
1009
  ent = &qty_table[q];
1010
 
1011
  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
1012
  gcc_assert (REGNO_QTY_VALID_P (old));
1013
 
1014
  REG_QTY (new) = q;
1015
  firstr = ent->first_reg;
1016
  lastr = ent->last_reg;
1017
 
1018
  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
1019
     hard regs.  Among pseudos, if NEW will live longer than any other reg
1020
     of the same qty, and that is beyond the current basic block,
1021
     make it the new canonical replacement for this qty.  */
1022
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1023
      /* Certain fixed registers might be of the class NO_REGS.  This means
1024
         that not only can they not be allocated by the compiler, but
1025
         they cannot be used in substitutions or canonicalizations
1026
         either.  */
1027
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1028
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1029
          || (new >= FIRST_PSEUDO_REGISTER
1030
              && (firstr < FIRST_PSEUDO_REGISTER
1031
                  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1032
                       || (uid_cuid[REGNO_FIRST_UID (new)]
1033
                           < cse_basic_block_start))
1034
                      && (uid_cuid[REGNO_LAST_UID (new)]
1035
                          > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1036
    {
1037
      reg_eqv_table[firstr].prev = new;
1038
      reg_eqv_table[new].next = firstr;
1039
      reg_eqv_table[new].prev = -1;
1040
      ent->first_reg = new;
1041
    }
1042
  else
1043
    {
1044
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
1045
         Otherwise, insert before any non-fixed hard regs that are at the
1046
         end.  Registers of class NO_REGS cannot be used as an
1047
         equivalent for anything.  */
1048
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1049
             && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1050
             && new >= FIRST_PSEUDO_REGISTER)
1051
        lastr = reg_eqv_table[lastr].prev;
1052
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1053
      if (reg_eqv_table[lastr].next >= 0)
1054
        reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1055
      else
1056
        qty_table[q].last_reg = new;
1057
      reg_eqv_table[lastr].next = new;
1058
      reg_eqv_table[new].prev = lastr;
1059
    }
1060
}
1061
 
1062
/* Remove REG from its equivalence class.  */
1063
 
1064
static void
1065
delete_reg_equiv (unsigned int reg)
1066
{
1067
  struct qty_table_elem *ent;
1068
  int q = REG_QTY (reg);
1069
  int p, n;
1070
 
1071
  /* If invalid, do nothing.  */
1072
  if (! REGNO_QTY_VALID_P (reg))
1073
    return;
1074
 
1075
  ent = &qty_table[q];
1076
 
1077
  p = reg_eqv_table[reg].prev;
1078
  n = reg_eqv_table[reg].next;
1079
 
1080
  if (n != -1)
1081
    reg_eqv_table[n].prev = p;
1082
  else
1083
    ent->last_reg = p;
1084
  if (p != -1)
1085
    reg_eqv_table[p].next = n;
1086
  else
1087
    ent->first_reg = n;
1088
 
1089
  REG_QTY (reg) = -reg - 1;
1090
}
1091
 
1092
/* Remove any invalid expressions from the hash table
1093
   that refer to any of the registers contained in expression X.
1094
 
1095
   Make sure that newly inserted references to those registers
1096
   as subexpressions will be considered valid.
1097
 
1098
   mention_regs is not called when a register itself
1099
   is being stored in the table.
1100
 
1101
   Return 1 if we have done something that may have changed the hash code
1102
   of X.  */
1103
 
1104
static int
1105
mention_regs (rtx x)
1106
{
1107
  enum rtx_code code;
1108
  int i, j;
1109
  const char *fmt;
1110
  int changed = 0;
1111
 
1112
  if (x == 0)
1113
    return 0;
1114
 
1115
  code = GET_CODE (x);
1116
  if (code == REG)
1117
    {
1118
      unsigned int regno = REGNO (x);
1119
      unsigned int endregno
1120
        = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1121
                   : hard_regno_nregs[regno][GET_MODE (x)]);
1122
      unsigned int i;
1123
 
1124
      for (i = regno; i < endregno; i++)
1125
        {
1126
          if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1127
            remove_invalid_refs (i);
1128
 
1129
          REG_IN_TABLE (i) = REG_TICK (i);
1130
          SUBREG_TICKED (i) = -1;
1131
        }
1132
 
1133
      return 0;
1134
    }
1135
 
1136
  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1137
     pseudo if they don't use overlapping words.  We handle only pseudos
1138
     here for simplicity.  */
1139
  if (code == SUBREG && REG_P (SUBREG_REG (x))
1140
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1141
    {
1142
      unsigned int i = REGNO (SUBREG_REG (x));
1143
 
1144
      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1145
        {
1146
          /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1147
             the last store to this register really stored into this
1148
             subreg, then remove the memory of this subreg.
1149
             Otherwise, remove any memory of the entire register and
1150
             all its subregs from the table.  */
1151
          if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1152
              || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1153
            remove_invalid_refs (i);
1154
          else
1155
            remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1156
        }
1157
 
1158
      REG_IN_TABLE (i) = REG_TICK (i);
1159
      SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1160
      return 0;
1161
    }
1162
 
1163
  /* If X is a comparison or a COMPARE and either operand is a register
1164
     that does not have a quantity, give it one.  This is so that a later
1165
     call to record_jump_equiv won't cause X to be assigned a different
1166
     hash code and not found in the table after that call.
1167
 
1168
     It is not necessary to do this here, since rehash_using_reg can
1169
     fix up the table later, but doing this here eliminates the need to
1170
     call that expensive function in the most common case where the only
1171
     use of the register is in the comparison.  */
1172
 
1173
  if (code == COMPARE || COMPARISON_P (x))
1174
    {
1175
      if (REG_P (XEXP (x, 0))
1176
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1177
        if (insert_regs (XEXP (x, 0), NULL, 0))
1178
          {
1179
            rehash_using_reg (XEXP (x, 0));
1180
            changed = 1;
1181
          }
1182
 
1183
      if (REG_P (XEXP (x, 1))
1184
          && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1185
        if (insert_regs (XEXP (x, 1), NULL, 0))
1186
          {
1187
            rehash_using_reg (XEXP (x, 1));
1188
            changed = 1;
1189
          }
1190
    }
1191
 
1192
  fmt = GET_RTX_FORMAT (code);
1193
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1194
    if (fmt[i] == 'e')
1195
      changed |= mention_regs (XEXP (x, i));
1196
    else if (fmt[i] == 'E')
1197
      for (j = 0; j < XVECLEN (x, i); j++)
1198
        changed |= mention_regs (XVECEXP (x, i, j));
1199
 
1200
  return changed;
1201
}
1202
 
1203
/* Update the register quantities for inserting X into the hash table
1204
   with a value equivalent to CLASSP.
1205
   (If the class does not contain a REG, it is irrelevant.)
1206
   If MODIFIED is nonzero, X is a destination; it is being modified.
1207
   Note that delete_reg_equiv should be called on a register
1208
   before insert_regs is done on that register with MODIFIED != 0.
1209
 
1210
   Nonzero value means that elements of reg_qty have changed
1211
   so X's hash code may be different.  */
1212
 
1213
static int
1214
insert_regs (rtx x, struct table_elt *classp, int modified)
1215
{
1216
  if (REG_P (x))
1217
    {
1218
      unsigned int regno = REGNO (x);
1219
      int qty_valid;
1220
 
1221
      /* If REGNO is in the equivalence table already but is of the
1222
         wrong mode for that equivalence, don't do anything here.  */
1223
 
1224
      qty_valid = REGNO_QTY_VALID_P (regno);
1225
      if (qty_valid)
1226
        {
1227
          struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1228
 
1229
          if (ent->mode != GET_MODE (x))
1230
            return 0;
1231
        }
1232
 
1233
      if (modified || ! qty_valid)
1234
        {
1235
          if (classp)
1236
            for (classp = classp->first_same_value;
1237
                 classp != 0;
1238
                 classp = classp->next_same_value)
1239
              if (REG_P (classp->exp)
1240
                  && GET_MODE (classp->exp) == GET_MODE (x))
1241
                {
1242
                  unsigned c_regno = REGNO (classp->exp);
1243
 
1244
                  gcc_assert (REGNO_QTY_VALID_P (c_regno));
1245
 
1246
                  /* Suppose that 5 is hard reg and 100 and 101 are
1247
                     pseudos.  Consider
1248
 
1249
                     (set (reg:si 100) (reg:si 5))
1250
                     (set (reg:si 5) (reg:si 100))
1251
                     (set (reg:di 101) (reg:di 5))
1252
 
1253
                     We would now set REG_QTY (101) = REG_QTY (5), but the
1254
                     entry for 5 is in SImode.  When we use this later in
1255
                     copy propagation, we get the register in wrong mode.  */
1256
                  if (qty_table[REG_QTY (c_regno)].mode != GET_MODE (x))
1257
                    continue;
1258
 
1259
                  make_regs_eqv (regno, c_regno);
1260
                  return 1;
1261
                }
1262
 
1263
          /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1264
             than REG_IN_TABLE to find out if there was only a single preceding
1265
             invalidation - for the SUBREG - or another one, which would be
1266
             for the full register.  However, if we find here that REG_TICK
1267
             indicates that the register is invalid, it means that it has
1268
             been invalidated in a separate operation.  The SUBREG might be used
1269
             now (then this is a recursive call), or we might use the full REG
1270
             now and a SUBREG of it later.  So bump up REG_TICK so that
1271
             mention_regs will do the right thing.  */
1272
          if (! modified
1273
              && REG_IN_TABLE (regno) >= 0
1274
              && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1275
            REG_TICK (regno)++;
1276
          make_new_qty (regno, GET_MODE (x));
1277
          return 1;
1278
        }
1279
 
1280
      return 0;
1281
    }
1282
 
1283
  /* If X is a SUBREG, we will likely be inserting the inner register in the
1284
     table.  If that register doesn't have an assigned quantity number at
1285
     this point but does later, the insertion that we will be doing now will
1286
     not be accessible because its hash code will have changed.  So assign
1287
     a quantity number now.  */
1288
 
1289
  else if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x))
1290
           && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1291
    {
1292
      insert_regs (SUBREG_REG (x), NULL, 0);
1293
      mention_regs (x);
1294
      return 1;
1295
    }
1296
  else
1297
    return mention_regs (x);
1298
}
1299
 
1300
/* Look in or update the hash table.  */
1301
 
1302
/* Remove table element ELT from use in the table.
1303
   HASH is its hash code, made using the HASH macro.
1304
   It's an argument because often that is known in advance
1305
   and we save much time not recomputing it.  */
1306
 
1307
static void
1308
remove_from_table (struct table_elt *elt, unsigned int hash)
1309
{
1310
  if (elt == 0)
1311
    return;
1312
 
1313
  /* Mark this element as removed.  See cse_insn.  */
1314
  elt->first_same_value = 0;
1315
 
1316
  /* Remove the table element from its equivalence class.  */
1317
 
1318
  {
1319
    struct table_elt *prev = elt->prev_same_value;
1320
    struct table_elt *next = elt->next_same_value;
1321
 
1322
    if (next)
1323
      next->prev_same_value = prev;
1324
 
1325
    if (prev)
1326
      prev->next_same_value = next;
1327
    else
1328
      {
1329
        struct table_elt *newfirst = next;
1330
        while (next)
1331
          {
1332
            next->first_same_value = newfirst;
1333
            next = next->next_same_value;
1334
          }
1335
      }
1336
  }
1337
 
1338
  /* Remove the table element from its hash bucket.  */
1339
 
1340
  {
1341
    struct table_elt *prev = elt->prev_same_hash;
1342
    struct table_elt *next = elt->next_same_hash;
1343
 
1344
    if (next)
1345
      next->prev_same_hash = prev;
1346
 
1347
    if (prev)
1348
      prev->next_same_hash = next;
1349
    else if (table[hash] == elt)
1350
      table[hash] = next;
1351
    else
1352
      {
1353
        /* This entry is not in the proper hash bucket.  This can happen
1354
           when two classes were merged by `merge_equiv_classes'.  Search
1355
           for the hash bucket that it heads.  This happens only very
1356
           rarely, so the cost is acceptable.  */
1357
        for (hash = 0; hash < HASH_SIZE; hash++)
1358
          if (table[hash] == elt)
1359
            table[hash] = next;
1360
      }
1361
  }
1362
 
1363
  /* Remove the table element from its related-value circular chain.  */
1364
 
1365
  if (elt->related_value != 0 && elt->related_value != elt)
1366
    {
1367
      struct table_elt *p = elt->related_value;
1368
 
1369
      while (p->related_value != elt)
1370
        p = p->related_value;
1371
      p->related_value = elt->related_value;
1372
      if (p->related_value == p)
1373
        p->related_value = 0;
1374
    }
1375
 
1376
  /* Now add it to the free element chain.  */
1377
  elt->next_same_hash = free_element_chain;
1378
  free_element_chain = elt;
1379
 
1380
  table_size--;
1381
}
1382
 
1383
/* Look up X in the hash table and return its table element,
1384
   or 0 if X is not in the table.
1385
 
1386
   MODE is the machine-mode of X, or if X is an integer constant
1387
   with VOIDmode then MODE is the mode with which X will be used.
1388
 
1389
   Here we are satisfied to find an expression whose tree structure
1390
   looks like X.  */
1391
 
1392
static struct table_elt *
1393
lookup (rtx x, unsigned int hash, enum machine_mode mode)
1394
{
1395
  struct table_elt *p;
1396
 
1397
  for (p = table[hash]; p; p = p->next_same_hash)
1398
    if (mode == p->mode && ((x == p->exp && REG_P (x))
1399
                            || exp_equiv_p (x, p->exp, !REG_P (x), false)))
1400
      return p;
1401
 
1402
  return 0;
1403
}
1404
 
1405
/* Like `lookup' but don't care whether the table element uses invalid regs.
1406
   Also ignore discrepancies in the machine mode of a register.  */
1407
 
1408
static struct table_elt *
1409
lookup_for_remove (rtx x, unsigned int hash, enum machine_mode mode)
1410
{
1411
  struct table_elt *p;
1412
 
1413
  if (REG_P (x))
1414
    {
1415
      unsigned int regno = REGNO (x);
1416
 
1417
      /* Don't check the machine mode when comparing registers;
1418
         invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
1419
      for (p = table[hash]; p; p = p->next_same_hash)
1420
        if (REG_P (p->exp)
1421
            && REGNO (p->exp) == regno)
1422
          return p;
1423
    }
1424
  else
1425
    {
1426
      for (p = table[hash]; p; p = p->next_same_hash)
1427
        if (mode == p->mode
1428
            && (x == p->exp || exp_equiv_p (x, p->exp, 0, false)))
1429
          return p;
1430
    }
1431
 
1432
  return 0;
1433
}
1434
 
1435
/* Look for an expression equivalent to X and with code CODE.
1436
   If one is found, return that expression.  */
1437
 
1438
static rtx
1439
lookup_as_function (rtx x, enum rtx_code code)
1440
{
1441
  struct table_elt *p
1442
    = lookup (x, SAFE_HASH (x, VOIDmode), GET_MODE (x));
1443
 
1444
  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1445
     long as we are narrowing.  So if we looked in vain for a mode narrower
1446
     than word_mode before, look for word_mode now.  */
1447
  if (p == 0 && code == CONST_INT
1448
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1449
    {
1450
      x = copy_rtx (x);
1451
      PUT_MODE (x, word_mode);
1452
      p = lookup (x, SAFE_HASH (x, VOIDmode), word_mode);
1453
    }
1454
 
1455
  if (p == 0)
1456
    return 0;
1457
 
1458
  for (p = p->first_same_value; p; p = p->next_same_value)
1459
    if (GET_CODE (p->exp) == code
1460
        /* Make sure this is a valid entry in the table.  */
1461
        && exp_equiv_p (p->exp, p->exp, 1, false))
1462
      return p->exp;
1463
 
1464
  return 0;
1465
}
1466
 
1467
/* Insert X in the hash table, assuming HASH is its hash code
1468
   and CLASSP is an element of the class it should go in
1469
   (or 0 if a new class should be made).
1470
   It is inserted at the proper position to keep the class in
1471
   the order cheapest first.
1472
 
1473
   MODE is the machine-mode of X, or if X is an integer constant
1474
   with VOIDmode then MODE is the mode with which X will be used.
1475
 
1476
   For elements of equal cheapness, the most recent one
1477
   goes in front, except that the first element in the list
1478
   remains first unless a cheaper element is added.  The order of
1479
   pseudo-registers does not matter, as canon_reg will be called to
1480
   find the cheapest when a register is retrieved from the table.
1481
 
1482
   The in_memory field in the hash table element is set to 0.
1483
   The caller must set it nonzero if appropriate.
1484
 
1485
   You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1486
   and if insert_regs returns a nonzero value
1487
   you must then recompute its hash code before calling here.
1488
 
1489
   If necessary, update table showing constant values of quantities.  */
1490
 
1491
#define CHEAPER(X, Y) \
1492
 (preferable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1493
 
1494
static struct table_elt *
1495
insert (rtx x, struct table_elt *classp, unsigned int hash, enum machine_mode mode)
1496
{
1497
  struct table_elt *elt;
1498
 
1499
  /* If X is a register and we haven't made a quantity for it,
1500
     something is wrong.  */
1501
  gcc_assert (!REG_P (x) || REGNO_QTY_VALID_P (REGNO (x)));
1502
 
1503
  /* If X is a hard register, show it is being put in the table.  */
1504
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1505
    {
1506
      unsigned int regno = REGNO (x);
1507
      unsigned int endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
1508
      unsigned int i;
1509
 
1510
      for (i = regno; i < endregno; i++)
1511
        SET_HARD_REG_BIT (hard_regs_in_table, i);
1512
    }
1513
 
1514
  /* Put an element for X into the right hash bucket.  */
1515
 
1516
  elt = free_element_chain;
1517
  if (elt)
1518
    free_element_chain = elt->next_same_hash;
1519
  else
1520
    elt = XNEW (struct table_elt);
1521
 
1522
  elt->exp = x;
1523
  elt->canon_exp = NULL_RTX;
1524
  elt->cost = COST (x);
1525
  elt->regcost = approx_reg_cost (x);
1526
  elt->next_same_value = 0;
1527
  elt->prev_same_value = 0;
1528
  elt->next_same_hash = table[hash];
1529
  elt->prev_same_hash = 0;
1530
  elt->related_value = 0;
1531
  elt->in_memory = 0;
1532
  elt->mode = mode;
1533
  elt->is_const = (CONSTANT_P (x) || fixed_base_plus_p (x));
1534
 
1535
  if (table[hash])
1536
    table[hash]->prev_same_hash = elt;
1537
  table[hash] = elt;
1538
 
1539
  /* Put it into the proper value-class.  */
1540
  if (classp)
1541
    {
1542
      classp = classp->first_same_value;
1543
      if (CHEAPER (elt, classp))
1544
        /* Insert at the head of the class.  */
1545
        {
1546
          struct table_elt *p;
1547
          elt->next_same_value = classp;
1548
          classp->prev_same_value = elt;
1549
          elt->first_same_value = elt;
1550
 
1551
          for (p = classp; p; p = p->next_same_value)
1552
            p->first_same_value = elt;
1553
        }
1554
      else
1555
        {
1556
          /* Insert not at head of the class.  */
1557
          /* Put it after the last element cheaper than X.  */
1558
          struct table_elt *p, *next;
1559
 
1560
          for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1561
               p = next);
1562
 
1563
          /* Put it after P and before NEXT.  */
1564
          elt->next_same_value = next;
1565
          if (next)
1566
            next->prev_same_value = elt;
1567
 
1568
          elt->prev_same_value = p;
1569
          p->next_same_value = elt;
1570
          elt->first_same_value = classp;
1571
        }
1572
    }
1573
  else
1574
    elt->first_same_value = elt;
1575
 
1576
  /* If this is a constant being set equivalent to a register or a register
1577
     being set equivalent to a constant, note the constant equivalence.
1578
 
1579
     If this is a constant, it cannot be equivalent to a different constant,
1580
     and a constant is the only thing that can be cheaper than a register.  So
1581
     we know the register is the head of the class (before the constant was
1582
     inserted).
1583
 
1584
     If this is a register that is not already known equivalent to a
1585
     constant, we must check the entire class.
1586
 
1587
     If this is a register that is already known equivalent to an insn,
1588
     update the qtys `const_insn' to show that `this_insn' is the latest
1589
     insn making that quantity equivalent to the constant.  */
1590
 
1591
  if (elt->is_const && classp && REG_P (classp->exp)
1592
      && !REG_P (x))
1593
    {
1594
      int exp_q = REG_QTY (REGNO (classp->exp));
1595
      struct qty_table_elem *exp_ent = &qty_table[exp_q];
1596
 
1597
      exp_ent->const_rtx = gen_lowpart (exp_ent->mode, x);
1598
      exp_ent->const_insn = this_insn;
1599
    }
1600
 
1601
  else if (REG_P (x)
1602
           && classp
1603
           && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1604
           && ! elt->is_const)
1605
    {
1606
      struct table_elt *p;
1607
 
1608
      for (p = classp; p != 0; p = p->next_same_value)
1609
        {
1610
          if (p->is_const && !REG_P (p->exp))
1611
            {
1612
              int x_q = REG_QTY (REGNO (x));
1613
              struct qty_table_elem *x_ent = &qty_table[x_q];
1614
 
1615
              x_ent->const_rtx
1616
                = gen_lowpart (GET_MODE (x), p->exp);
1617
              x_ent->const_insn = this_insn;
1618
              break;
1619
            }
1620
        }
1621
    }
1622
 
1623
  else if (REG_P (x)
1624
           && qty_table[REG_QTY (REGNO (x))].const_rtx
1625
           && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1626
    qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1627
 
1628
  /* If this is a constant with symbolic value,
1629
     and it has a term with an explicit integer value,
1630
     link it up with related expressions.  */
1631
  if (GET_CODE (x) == CONST)
1632
    {
1633
      rtx subexp = get_related_value (x);
1634
      unsigned subhash;
1635
      struct table_elt *subelt, *subelt_prev;
1636
 
1637
      if (subexp != 0)
1638
        {
1639
          /* Get the integer-free subexpression in the hash table.  */
1640
          subhash = SAFE_HASH (subexp, mode);
1641
          subelt = lookup (subexp, subhash, mode);
1642
          if (subelt == 0)
1643
            subelt = insert (subexp, NULL, subhash, mode);
1644
          /* Initialize SUBELT's circular chain if it has none.  */
1645
          if (subelt->related_value == 0)
1646
            subelt->related_value = subelt;
1647
          /* Find the element in the circular chain that precedes SUBELT.  */
1648
          subelt_prev = subelt;
1649
          while (subelt_prev->related_value != subelt)
1650
            subelt_prev = subelt_prev->related_value;
1651
          /* Put new ELT into SUBELT's circular chain just before SUBELT.
1652
             This way the element that follows SUBELT is the oldest one.  */
1653
          elt->related_value = subelt_prev->related_value;
1654
          subelt_prev->related_value = elt;
1655
        }
1656
    }
1657
 
1658
  table_size++;
1659
 
1660
  return elt;
1661
}
1662
 
1663
/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1664
   CLASS2 into CLASS1.  This is done when we have reached an insn which makes
1665
   the two classes equivalent.
1666
 
1667
   CLASS1 will be the surviving class; CLASS2 should not be used after this
1668
   call.
1669
 
1670
   Any invalid entries in CLASS2 will not be copied.  */
1671
 
1672
static void
1673
merge_equiv_classes (struct table_elt *class1, struct table_elt *class2)
1674
{
1675
  struct table_elt *elt, *next, *new;
1676
 
1677
  /* Ensure we start with the head of the classes.  */
1678
  class1 = class1->first_same_value;
1679
  class2 = class2->first_same_value;
1680
 
1681
  /* If they were already equal, forget it.  */
1682
  if (class1 == class2)
1683
    return;
1684
 
1685
  for (elt = class2; elt; elt = next)
1686
    {
1687
      unsigned int hash;
1688
      rtx exp = elt->exp;
1689
      enum machine_mode mode = elt->mode;
1690
 
1691
      next = elt->next_same_value;
1692
 
1693
      /* Remove old entry, make a new one in CLASS1's class.
1694
         Don't do this for invalid entries as we cannot find their
1695
         hash code (it also isn't necessary).  */
1696
      if (REG_P (exp) || exp_equiv_p (exp, exp, 1, false))
1697
        {
1698
          bool need_rehash = false;
1699
 
1700
          hash_arg_in_memory = 0;
1701
          hash = HASH (exp, mode);
1702
 
1703
          if (REG_P (exp))
1704
            {
1705
              need_rehash = REGNO_QTY_VALID_P (REGNO (exp));
1706
              delete_reg_equiv (REGNO (exp));
1707
            }
1708
 
1709
          remove_from_table (elt, hash);
1710
 
1711
          if (insert_regs (exp, class1, 0) || need_rehash)
1712
            {
1713
              rehash_using_reg (exp);
1714
              hash = HASH (exp, mode);
1715
            }
1716
          new = insert (exp, class1, hash, mode);
1717
          new->in_memory = hash_arg_in_memory;
1718
        }
1719
    }
1720
}
1721
 
1722
/* Flush the entire hash table.  */
1723
 
1724
static void
1725
flush_hash_table (void)
1726
{
1727
  int i;
1728
  struct table_elt *p;
1729
 
1730
  for (i = 0; i < HASH_SIZE; i++)
1731
    for (p = table[i]; p; p = table[i])
1732
      {
1733
        /* Note that invalidate can remove elements
1734
           after P in the current hash chain.  */
1735
        if (REG_P (p->exp))
1736
          invalidate (p->exp, VOIDmode);
1737
        else
1738
          remove_from_table (p, i);
1739
      }
1740
}
1741
 
1742
/* Function called for each rtx to check whether true dependence exist.  */
1743
struct check_dependence_data
1744
{
1745
  enum machine_mode mode;
1746
  rtx exp;
1747
  rtx addr;
1748
};
1749
 
1750
static int
1751
check_dependence (rtx *x, void *data)
1752
{
1753
  struct check_dependence_data *d = (struct check_dependence_data *) data;
1754
  if (*x && MEM_P (*x))
1755
    return canon_true_dependence (d->exp, d->mode, d->addr, *x,
1756
                                  cse_rtx_varies_p);
1757
  else
1758
    return 0;
1759
}
1760
 
1761
/* Remove from the hash table, or mark as invalid, all expressions whose
1762
   values could be altered by storing in X.  X is a register, a subreg, or
1763
   a memory reference with nonvarying address (because, when a memory
1764
   reference with a varying address is stored in, all memory references are
1765
   removed by invalidate_memory so specific invalidation is superfluous).
1766
   FULL_MODE, if not VOIDmode, indicates that this much should be
1767
   invalidated instead of just the amount indicated by the mode of X.  This
1768
   is only used for bitfield stores into memory.
1769
 
1770
   A nonvarying address may be just a register or just a symbol reference,
1771
   or it may be either of those plus a numeric offset.  */
1772
 
1773
static void
1774
invalidate (rtx x, enum machine_mode full_mode)
1775
{
1776
  int i;
1777
  struct table_elt *p;
1778
  rtx addr;
1779
 
1780
  switch (GET_CODE (x))
1781
    {
1782
    case REG:
1783
      {
1784
        /* If X is a register, dependencies on its contents are recorded
1785
           through the qty number mechanism.  Just change the qty number of
1786
           the register, mark it as invalid for expressions that refer to it,
1787
           and remove it itself.  */
1788
        unsigned int regno = REGNO (x);
1789
        unsigned int hash = HASH (x, GET_MODE (x));
1790
 
1791
        /* Remove REGNO from any quantity list it might be on and indicate
1792
           that its value might have changed.  If it is a pseudo, remove its
1793
           entry from the hash table.
1794
 
1795
           For a hard register, we do the first two actions above for any
1796
           additional hard registers corresponding to X.  Then, if any of these
1797
           registers are in the table, we must remove any REG entries that
1798
           overlap these registers.  */
1799
 
1800
        delete_reg_equiv (regno);
1801
        REG_TICK (regno)++;
1802
        SUBREG_TICKED (regno) = -1;
1803
 
1804
        if (regno >= FIRST_PSEUDO_REGISTER)
1805
          {
1806
            /* Because a register can be referenced in more than one mode,
1807
               we might have to remove more than one table entry.  */
1808
            struct table_elt *elt;
1809
 
1810
            while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1811
              remove_from_table (elt, hash);
1812
          }
1813
        else
1814
          {
1815
            HOST_WIDE_INT in_table
1816
              = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1817
            unsigned int endregno
1818
              = regno + hard_regno_nregs[regno][GET_MODE (x)];
1819
            unsigned int tregno, tendregno, rn;
1820
            struct table_elt *p, *next;
1821
 
1822
            CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1823
 
1824
            for (rn = regno + 1; rn < endregno; rn++)
1825
              {
1826
                in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1827
                CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1828
                delete_reg_equiv (rn);
1829
                REG_TICK (rn)++;
1830
                SUBREG_TICKED (rn) = -1;
1831
              }
1832
 
1833
            if (in_table)
1834
              for (hash = 0; hash < HASH_SIZE; hash++)
1835
                for (p = table[hash]; p; p = next)
1836
                  {
1837
                    next = p->next_same_hash;
1838
 
1839
                    if (!REG_P (p->exp)
1840
                        || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1841
                      continue;
1842
 
1843
                    tregno = REGNO (p->exp);
1844
                    tendregno
1845
                      = tregno + hard_regno_nregs[tregno][GET_MODE (p->exp)];
1846
                    if (tendregno > regno && tregno < endregno)
1847
                      remove_from_table (p, hash);
1848
                  }
1849
          }
1850
      }
1851
      return;
1852
 
1853
    case SUBREG:
1854
      invalidate (SUBREG_REG (x), VOIDmode);
1855
      return;
1856
 
1857
    case PARALLEL:
1858
      for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1859
        invalidate (XVECEXP (x, 0, i), VOIDmode);
1860
      return;
1861
 
1862
    case EXPR_LIST:
1863
      /* This is part of a disjoint return value; extract the location in
1864
         question ignoring the offset.  */
1865
      invalidate (XEXP (x, 0), VOIDmode);
1866
      return;
1867
 
1868
    case MEM:
1869
      addr = canon_rtx (get_addr (XEXP (x, 0)));
1870
      /* Calculate the canonical version of X here so that
1871
         true_dependence doesn't generate new RTL for X on each call.  */
1872
      x = canon_rtx (x);
1873
 
1874
      /* Remove all hash table elements that refer to overlapping pieces of
1875
         memory.  */
1876
      if (full_mode == VOIDmode)
1877
        full_mode = GET_MODE (x);
1878
 
1879
      for (i = 0; i < HASH_SIZE; i++)
1880
        {
1881
          struct table_elt *next;
1882
 
1883
          for (p = table[i]; p; p = next)
1884
            {
1885
              next = p->next_same_hash;
1886
              if (p->in_memory)
1887
                {
1888
                  struct check_dependence_data d;
1889
 
1890
                  /* Just canonicalize the expression once;
1891
                     otherwise each time we call invalidate
1892
                     true_dependence will canonicalize the
1893
                     expression again.  */
1894
                  if (!p->canon_exp)
1895
                    p->canon_exp = canon_rtx (p->exp);
1896
                  d.exp = x;
1897
                  d.addr = addr;
1898
                  d.mode = full_mode;
1899
                  if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1900
                    remove_from_table (p, i);
1901
                }
1902
            }
1903
        }
1904
      return;
1905
 
1906
    default:
1907
      gcc_unreachable ();
1908
    }
1909
}
1910
 
1911
/* Remove all expressions that refer to register REGNO,
1912
   since they are already invalid, and we are about to
1913
   mark that register valid again and don't want the old
1914
   expressions to reappear as valid.  */
1915
 
1916
static void
1917
remove_invalid_refs (unsigned int regno)
1918
{
1919
  unsigned int i;
1920
  struct table_elt *p, *next;
1921
 
1922
  for (i = 0; i < HASH_SIZE; i++)
1923
    for (p = table[i]; p; p = next)
1924
      {
1925
        next = p->next_same_hash;
1926
        if (!REG_P (p->exp)
1927
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1928
          remove_from_table (p, i);
1929
      }
1930
}
1931
 
1932
/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
1933
   and mode MODE.  */
1934
static void
1935
remove_invalid_subreg_refs (unsigned int regno, unsigned int offset,
1936
                            enum machine_mode mode)
1937
{
1938
  unsigned int i;
1939
  struct table_elt *p, *next;
1940
  unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
1941
 
1942
  for (i = 0; i < HASH_SIZE; i++)
1943
    for (p = table[i]; p; p = next)
1944
      {
1945
        rtx exp = p->exp;
1946
        next = p->next_same_hash;
1947
 
1948
        if (!REG_P (exp)
1949
            && (GET_CODE (exp) != SUBREG
1950
                || !REG_P (SUBREG_REG (exp))
1951
                || REGNO (SUBREG_REG (exp)) != regno
1952
                || (((SUBREG_BYTE (exp)
1953
                      + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
1954
                    && SUBREG_BYTE (exp) <= end))
1955
            && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
1956
          remove_from_table (p, i);
1957
      }
1958
}
1959
 
1960
/* Recompute the hash codes of any valid entries in the hash table that
1961
   reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
1962
 
1963
   This is called when we make a jump equivalence.  */
1964
 
1965
static void
1966
rehash_using_reg (rtx x)
1967
{
1968
  unsigned int i;
1969
  struct table_elt *p, *next;
1970
  unsigned hash;
1971
 
1972
  if (GET_CODE (x) == SUBREG)
1973
    x = SUBREG_REG (x);
1974
 
1975
  /* If X is not a register or if the register is known not to be in any
1976
     valid entries in the table, we have no work to do.  */
1977
 
1978
  if (!REG_P (x)
1979
      || REG_IN_TABLE (REGNO (x)) < 0
1980
      || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
1981
    return;
1982
 
1983
  /* Scan all hash chains looking for valid entries that mention X.
1984
     If we find one and it is in the wrong hash chain, move it.  */
1985
 
1986
  for (i = 0; i < HASH_SIZE; i++)
1987
    for (p = table[i]; p; p = next)
1988
      {
1989
        next = p->next_same_hash;
1990
        if (reg_mentioned_p (x, p->exp)
1991
            && exp_equiv_p (p->exp, p->exp, 1, false)
1992
            && i != (hash = SAFE_HASH (p->exp, p->mode)))
1993
          {
1994
            if (p->next_same_hash)
1995
              p->next_same_hash->prev_same_hash = p->prev_same_hash;
1996
 
1997
            if (p->prev_same_hash)
1998
              p->prev_same_hash->next_same_hash = p->next_same_hash;
1999
            else
2000
              table[i] = p->next_same_hash;
2001
 
2002
            p->next_same_hash = table[hash];
2003
            p->prev_same_hash = 0;
2004
            if (table[hash])
2005
              table[hash]->prev_same_hash = p;
2006
            table[hash] = p;
2007
          }
2008
      }
2009
}
2010
 
2011
/* Remove from the hash table any expression that is a call-clobbered
2012
   register.  Also update their TICK values.  */
2013
 
2014
static void
2015
invalidate_for_call (void)
2016
{
2017
  unsigned int regno, endregno;
2018
  unsigned int i;
2019
  unsigned hash;
2020
  struct table_elt *p, *next;
2021
  int in_table = 0;
2022
 
2023
  /* Go through all the hard registers.  For each that is clobbered in
2024
     a CALL_INSN, remove the register from quantity chains and update
2025
     reg_tick if defined.  Also see if any of these registers is currently
2026
     in the table.  */
2027
 
2028
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2029
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2030
      {
2031
        delete_reg_equiv (regno);
2032
        if (REG_TICK (regno) >= 0)
2033
          {
2034
            REG_TICK (regno)++;
2035
            SUBREG_TICKED (regno) = -1;
2036
          }
2037
 
2038
        in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2039
      }
2040
 
2041
  /* In the case where we have no call-clobbered hard registers in the
2042
     table, we are done.  Otherwise, scan the table and remove any
2043
     entry that overlaps a call-clobbered register.  */
2044
 
2045
  if (in_table)
2046
    for (hash = 0; hash < HASH_SIZE; hash++)
2047
      for (p = table[hash]; p; p = next)
2048
        {
2049
          next = p->next_same_hash;
2050
 
2051
          if (!REG_P (p->exp)
2052
              || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2053
            continue;
2054
 
2055
          regno = REGNO (p->exp);
2056
          endregno = regno + hard_regno_nregs[regno][GET_MODE (p->exp)];
2057
 
2058
          for (i = regno; i < endregno; i++)
2059
            if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2060
              {
2061
                remove_from_table (p, hash);
2062
                break;
2063
              }
2064
        }
2065
}
2066
 
2067
/* Given an expression X of type CONST,
2068
   and ELT which is its table entry (or 0 if it
2069
   is not in the hash table),
2070
   return an alternate expression for X as a register plus integer.
2071
   If none can be found, return 0.  */
2072
 
2073
static rtx
2074
use_related_value (rtx x, struct table_elt *elt)
2075
{
2076
  struct table_elt *relt = 0;
2077
  struct table_elt *p, *q;
2078
  HOST_WIDE_INT offset;
2079
 
2080
  /* First, is there anything related known?
2081
     If we have a table element, we can tell from that.
2082
     Otherwise, must look it up.  */
2083
 
2084
  if (elt != 0 && elt->related_value != 0)
2085
    relt = elt;
2086
  else if (elt == 0 && GET_CODE (x) == CONST)
2087
    {
2088
      rtx subexp = get_related_value (x);
2089
      if (subexp != 0)
2090
        relt = lookup (subexp,
2091
                       SAFE_HASH (subexp, GET_MODE (subexp)),
2092
                       GET_MODE (subexp));
2093
    }
2094
 
2095
  if (relt == 0)
2096
    return 0;
2097
 
2098
  /* Search all related table entries for one that has an
2099
     equivalent register.  */
2100
 
2101
  p = relt;
2102
  while (1)
2103
    {
2104
      /* This loop is strange in that it is executed in two different cases.
2105
         The first is when X is already in the table.  Then it is searching
2106
         the RELATED_VALUE list of X's class (RELT).  The second case is when
2107
         X is not in the table.  Then RELT points to a class for the related
2108
         value.
2109
 
2110
         Ensure that, whatever case we are in, that we ignore classes that have
2111
         the same value as X.  */
2112
 
2113
      if (rtx_equal_p (x, p->exp))
2114
        q = 0;
2115
      else
2116
        for (q = p->first_same_value; q; q = q->next_same_value)
2117
          if (REG_P (q->exp))
2118
            break;
2119
 
2120
      if (q)
2121
        break;
2122
 
2123
      p = p->related_value;
2124
 
2125
      /* We went all the way around, so there is nothing to be found.
2126
         Alternatively, perhaps RELT was in the table for some other reason
2127
         and it has no related values recorded.  */
2128
      if (p == relt || p == 0)
2129
        break;
2130
    }
2131
 
2132
  if (q == 0)
2133
    return 0;
2134
 
2135
  offset = (get_integer_term (x) - get_integer_term (p->exp));
2136
  /* Note: OFFSET may be 0 if P->xexp and X are related by commutativity.  */
2137
  return plus_constant (q->exp, offset);
2138
}
2139
 
2140
/* Hash a string.  Just add its bytes up.  */
2141
static inline unsigned
2142
hash_rtx_string (const char *ps)
2143
{
2144
  unsigned hash = 0;
2145
  const unsigned char *p = (const unsigned char *) ps;
2146
 
2147
  if (p)
2148
    while (*p)
2149
      hash += *p++;
2150
 
2151
  return hash;
2152
}
2153
 
2154
/* Hash an rtx.  We are careful to make sure the value is never negative.
2155
   Equivalent registers hash identically.
2156
   MODE is used in hashing for CONST_INTs only;
2157
   otherwise the mode of X is used.
2158
 
2159
   Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
2160
 
2161
   If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
2162
   a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
2163
 
2164
   Note that cse_insn knows that the hash code of a MEM expression
2165
   is just (int) MEM plus the hash code of the address.  */
2166
 
2167
unsigned
2168
hash_rtx (rtx x, enum machine_mode mode, int *do_not_record_p,
2169
          int *hash_arg_in_memory_p, bool have_reg_qty)
2170
{
2171
  int i, j;
2172
  unsigned hash = 0;
2173
  enum rtx_code code;
2174
  const char *fmt;
2175
 
2176
  /* Used to turn recursion into iteration.  We can't rely on GCC's
2177
     tail-recursion elimination since we need to keep accumulating values
2178
     in HASH.  */
2179
 repeat:
2180
  if (x == 0)
2181
    return hash;
2182
 
2183
  code = GET_CODE (x);
2184
  switch (code)
2185
    {
2186
    case REG:
2187
      {
2188
        unsigned int regno = REGNO (x);
2189
 
2190
        if (!reload_completed)
2191
          {
2192
            /* On some machines, we can't record any non-fixed hard register,
2193
               because extending its life will cause reload problems.  We
2194
               consider ap, fp, sp, gp to be fixed for this purpose.
2195
 
2196
               We also consider CCmode registers to be fixed for this purpose;
2197
               failure to do so leads to failure to simplify 0<100 type of
2198
               conditionals.
2199
 
2200
               On all machines, we can't record any global registers.
2201
               Nor should we record any register that is in a small
2202
               class, as defined by CLASS_LIKELY_SPILLED_P.  */
2203
            bool record;
2204
 
2205
            if (regno >= FIRST_PSEUDO_REGISTER)
2206
              record = true;
2207
            else if (x == frame_pointer_rtx
2208
                     || x == hard_frame_pointer_rtx
2209
                     || x == arg_pointer_rtx
2210
                     || x == stack_pointer_rtx
2211
                     || x == pic_offset_table_rtx)
2212
              record = true;
2213
            else if (global_regs[regno])
2214
              record = false;
2215
            else if (fixed_regs[regno])
2216
              record = true;
2217
            else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2218
              record = true;
2219
            else if (SMALL_REGISTER_CLASSES)
2220
              record = false;
2221
            else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2222
              record = false;
2223
            else
2224
              record = true;
2225
 
2226
            if (!record)
2227
              {
2228
                *do_not_record_p = 1;
2229
                return 0;
2230
              }
2231
          }
2232
 
2233
        hash += ((unsigned int) REG << 7);
2234
        hash += (have_reg_qty ? (unsigned) REG_QTY (regno) : regno);
2235
        return hash;
2236
      }
2237
 
2238
    /* We handle SUBREG of a REG specially because the underlying
2239
       reg changes its hash value with every value change; we don't
2240
       want to have to forget unrelated subregs when one subreg changes.  */
2241
    case SUBREG:
2242
      {
2243
        if (REG_P (SUBREG_REG (x)))
2244
          {
2245
            hash += (((unsigned int) SUBREG << 7)
2246
                     + REGNO (SUBREG_REG (x))
2247
                     + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2248
            return hash;
2249
          }
2250
        break;
2251
      }
2252
 
2253
    case CONST_INT:
2254
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
2255
               + (unsigned int) INTVAL (x));
2256
      return hash;
2257
 
2258
    case CONST_DOUBLE:
2259
      /* This is like the general case, except that it only counts
2260
         the integers representing the constant.  */
2261
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
2262
      if (GET_MODE (x) != VOIDmode)
2263
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2264
      else
2265
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
2266
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
2267
      return hash;
2268
 
2269
    case CONST_VECTOR:
2270
      {
2271
        int units;
2272
        rtx elt;
2273
 
2274
        units = CONST_VECTOR_NUNITS (x);
2275
 
2276
        for (i = 0; i < units; ++i)
2277
          {
2278
            elt = CONST_VECTOR_ELT (x, i);
2279
            hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
2280
                              hash_arg_in_memory_p, have_reg_qty);
2281
          }
2282
 
2283
        return hash;
2284
      }
2285
 
2286
      /* Assume there is only one rtx object for any given label.  */
2287
    case LABEL_REF:
2288
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
2289
         differences and differences between each stage's debugging dumps.  */
2290
         hash += (((unsigned int) LABEL_REF << 7)
2291
                  + CODE_LABEL_NUMBER (XEXP (x, 0)));
2292
      return hash;
2293
 
2294
    case SYMBOL_REF:
2295
      {
2296
        /* Don't hash on the symbol's address to avoid bootstrap differences.
2297
           Different hash values may cause expressions to be recorded in
2298
           different orders and thus different registers to be used in the
2299
           final assembler.  This also avoids differences in the dump files
2300
           between various stages.  */
2301
        unsigned int h = 0;
2302
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);
2303
 
2304
        while (*p)
2305
          h += (h << 7) + *p++; /* ??? revisit */
2306
 
2307
        hash += ((unsigned int) SYMBOL_REF << 7) + h;
2308
        return hash;
2309
      }
2310
 
2311
    case MEM:
2312
      /* We don't record if marked volatile or if BLKmode since we don't
2313
         know the size of the move.  */
2314
      if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2315
        {
2316
          *do_not_record_p = 1;
2317
          return 0;
2318
        }
2319
      if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2320
        *hash_arg_in_memory_p = 1;
2321
 
2322
      /* Now that we have already found this special case,
2323
         might as well speed it up as much as possible.  */
2324
      hash += (unsigned) MEM;
2325
      x = XEXP (x, 0);
2326
      goto repeat;
2327
 
2328
    case USE:
2329
      /* A USE that mentions non-volatile memory needs special
2330
         handling since the MEM may be BLKmode which normally
2331
         prevents an entry from being made.  Pure calls are
2332
         marked by a USE which mentions BLKmode memory.
2333
         See calls.c:emit_call_1.  */
2334
      if (MEM_P (XEXP (x, 0))
2335
          && ! MEM_VOLATILE_P (XEXP (x, 0)))
2336
        {
2337
          hash += (unsigned) USE;
2338
          x = XEXP (x, 0);
2339
 
2340
          if (hash_arg_in_memory_p && !MEM_READONLY_P (x))
2341
            *hash_arg_in_memory_p = 1;
2342
 
2343
          /* Now that we have already found this special case,
2344
             might as well speed it up as much as possible.  */
2345
          hash += (unsigned) MEM;
2346
          x = XEXP (x, 0);
2347
          goto repeat;
2348
        }
2349
      break;
2350
 
2351
    case PRE_DEC:
2352
    case PRE_INC:
2353
    case POST_DEC:
2354
    case POST_INC:
2355
    case PRE_MODIFY:
2356
    case POST_MODIFY:
2357
    case PC:
2358
    case CC0:
2359
    case CALL:
2360
    case UNSPEC_VOLATILE:
2361
      *do_not_record_p = 1;
2362
      return 0;
2363
 
2364
    case ASM_OPERANDS:
2365
      if (MEM_VOLATILE_P (x))
2366
        {
2367
          *do_not_record_p = 1;
2368
          return 0;
2369
        }
2370
      else
2371
        {
2372
          /* We don't want to take the filename and line into account.  */
2373
          hash += (unsigned) code + (unsigned) GET_MODE (x)
2374
            + hash_rtx_string (ASM_OPERANDS_TEMPLATE (x))
2375
            + hash_rtx_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2376
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2377
 
2378
          if (ASM_OPERANDS_INPUT_LENGTH (x))
2379
            {
2380
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2381
                {
2382
                  hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
2383
                                     GET_MODE (ASM_OPERANDS_INPUT (x, i)),
2384
                                     do_not_record_p, hash_arg_in_memory_p,
2385
                                     have_reg_qty)
2386
                           + hash_rtx_string
2387
                                (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
2388
                }
2389
 
2390
              hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2391
              x = ASM_OPERANDS_INPUT (x, 0);
2392
              mode = GET_MODE (x);
2393
              goto repeat;
2394
            }
2395
 
2396
          return hash;
2397
        }
2398
      break;
2399
 
2400
    default:
2401
      break;
2402
    }
2403
 
2404
  i = GET_RTX_LENGTH (code) - 1;
2405
  hash += (unsigned) code + (unsigned) GET_MODE (x);
2406
  fmt = GET_RTX_FORMAT (code);
2407
  for (; i >= 0; i--)
2408
    {
2409
      switch (fmt[i])
2410
        {
2411
        case 'e':
2412
          /* If we are about to do the last recursive call
2413
             needed at this level, change it into iteration.
2414
             This function  is called enough to be worth it.  */
2415
          if (i == 0)
2416
            {
2417
              x = XEXP (x, i);
2418
              goto repeat;
2419
            }
2420
 
2421
          hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
2422
                            hash_arg_in_memory_p, have_reg_qty);
2423
          break;
2424
 
2425
        case 'E':
2426
          for (j = 0; j < XVECLEN (x, i); j++)
2427
            hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
2428
                              hash_arg_in_memory_p, have_reg_qty);
2429
          break;
2430
 
2431
        case 's':
2432
          hash += hash_rtx_string (XSTR (x, i));
2433
          break;
2434
 
2435
        case 'i':
2436
          hash += (unsigned int) XINT (x, i);
2437
          break;
2438
 
2439
        case '0': case 't':
2440
          /* Unused.  */
2441
          break;
2442
 
2443
        default:
2444
          gcc_unreachable ();
2445
        }
2446
    }
2447
 
2448
  return hash;
2449
}
2450
 
2451
/* Hash an rtx X for cse via hash_rtx.
2452
   Stores 1 in do_not_record if any subexpression is volatile.
2453
   Stores 1 in hash_arg_in_memory if X contains a mem rtx which
2454
   does not have the RTX_UNCHANGING_P bit set.  */
2455
 
2456
static inline unsigned
2457
canon_hash (rtx x, enum machine_mode mode)
2458
{
2459
  return hash_rtx (x, mode, &do_not_record, &hash_arg_in_memory, true);
2460
}
2461
 
2462
/* Like canon_hash but with no side effects, i.e. do_not_record
2463
   and hash_arg_in_memory are not changed.  */
2464
 
2465
static inline unsigned
2466
safe_hash (rtx x, enum machine_mode mode)
2467
{
2468
  int dummy_do_not_record;
2469
  return hash_rtx (x, mode, &dummy_do_not_record, NULL, true);
2470
}
2471
 
2472
/* Return 1 iff X and Y would canonicalize into the same thing,
2473
   without actually constructing the canonicalization of either one.
2474
   If VALIDATE is nonzero,
2475
   we assume X is an expression being processed from the rtl
2476
   and Y was found in the hash table.  We check register refs
2477
   in Y for being marked as valid.
2478
 
2479
   If FOR_GCSE is true, we compare X and Y for equivalence for GCSE.  */
2480
 
2481
int
2482
exp_equiv_p (rtx x, rtx y, int validate, bool for_gcse)
2483
{
2484
  int i, j;
2485
  enum rtx_code code;
2486
  const char *fmt;
2487
 
2488
  /* Note: it is incorrect to assume an expression is equivalent to itself
2489
     if VALIDATE is nonzero.  */
2490
  if (x == y && !validate)
2491
    return 1;
2492
 
2493
  if (x == 0 || y == 0)
2494
    return x == y;
2495
 
2496
  code = GET_CODE (x);
2497
  if (code != GET_CODE (y))
2498
    return 0;
2499
 
2500
  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
2501
  if (GET_MODE (x) != GET_MODE (y))
2502
    return 0;
2503
 
2504
  switch (code)
2505
    {
2506
    case PC:
2507
    case CC0:
2508
    case CONST_INT:
2509
    case CONST_DOUBLE:
2510
      return x == y;
2511
 
2512
    case LABEL_REF:
2513
      return XEXP (x, 0) == XEXP (y, 0);
2514
 
2515
    case SYMBOL_REF:
2516
      return XSTR (x, 0) == XSTR (y, 0);
2517
 
2518
    case REG:
2519
      if (for_gcse)
2520
        return REGNO (x) == REGNO (y);
2521
      else
2522
        {
2523
          unsigned int regno = REGNO (y);
2524
          unsigned int i;
2525
          unsigned int endregno
2526
            = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2527
                       : hard_regno_nregs[regno][GET_MODE (y)]);
2528
 
2529
          /* If the quantities are not the same, the expressions are not
2530
             equivalent.  If there are and we are not to validate, they
2531
             are equivalent.  Otherwise, ensure all regs are up-to-date.  */
2532
 
2533
          if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2534
            return 0;
2535
 
2536
          if (! validate)
2537
            return 1;
2538
 
2539
          for (i = regno; i < endregno; i++)
2540
            if (REG_IN_TABLE (i) != REG_TICK (i))
2541
              return 0;
2542
 
2543
          return 1;
2544
        }
2545
 
2546
    case MEM:
2547
      if (for_gcse)
2548
        {
2549
          /* A volatile mem should not be considered equivalent to any
2550
             other.  */
2551
          if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2552
            return 0;
2553
 
2554
          /* Can't merge two expressions in different alias sets, since we
2555
             can decide that the expression is transparent in a block when
2556
             it isn't, due to it being set with the different alias set.
2557
 
2558
             Also, can't merge two expressions with different MEM_ATTRS.
2559
             They could e.g. be two different entities allocated into the
2560
             same space on the stack (see e.g. PR25130).  In that case, the
2561
             MEM addresses can be the same, even though the two MEMs are
2562
             absolutely not equivalent.
2563
 
2564
             But because really all MEM attributes should be the same for
2565
             equivalent MEMs, we just use the invariant that MEMs that have
2566
             the same attributes share the same mem_attrs data structure.  */
2567
          if (MEM_ATTRS (x) != MEM_ATTRS (y))
2568
            return 0;
2569
        }
2570
      break;
2571
 
2572
    /*  For commutative operations, check both orders.  */
2573
    case PLUS:
2574
    case MULT:
2575
    case AND:
2576
    case IOR:
2577
    case XOR:
2578
    case NE:
2579
    case EQ:
2580
      return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0),
2581
                             validate, for_gcse)
2582
               && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2583
                                validate, for_gcse))
2584
              || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2585
                                validate, for_gcse)
2586
                  && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2587
                                   validate, for_gcse)));
2588
 
2589
    case ASM_OPERANDS:
2590
      /* We don't use the generic code below because we want to
2591
         disregard filename and line numbers.  */
2592
 
2593
      /* A volatile asm isn't equivalent to any other.  */
2594
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2595
        return 0;
2596
 
2597
      if (GET_MODE (x) != GET_MODE (y)
2598
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2599
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2600
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2601
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2602
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2603
        return 0;
2604
 
2605
      if (ASM_OPERANDS_INPUT_LENGTH (x))
2606
        {
2607
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2608
            if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2609
                               ASM_OPERANDS_INPUT (y, i),
2610
                               validate, for_gcse)
2611
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2612
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2613
              return 0;
2614
        }
2615
 
2616
      return 1;
2617
 
2618
    default:
2619
      break;
2620
    }
2621
 
2622
  /* Compare the elements.  If any pair of corresponding elements
2623
     fail to match, return 0 for the whole thing.  */
2624
 
2625
  fmt = GET_RTX_FORMAT (code);
2626
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2627
    {
2628
      switch (fmt[i])
2629
        {
2630
        case 'e':
2631
          if (! exp_equiv_p (XEXP (x, i), XEXP (y, i),
2632
                              validate, for_gcse))
2633
            return 0;
2634
          break;
2635
 
2636
        case 'E':
2637
          if (XVECLEN (x, i) != XVECLEN (y, i))
2638
            return 0;
2639
          for (j = 0; j < XVECLEN (x, i); j++)
2640
            if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2641
                                validate, for_gcse))
2642
              return 0;
2643
          break;
2644
 
2645
        case 's':
2646
          if (strcmp (XSTR (x, i), XSTR (y, i)))
2647
            return 0;
2648
          break;
2649
 
2650
        case 'i':
2651
          if (XINT (x, i) != XINT (y, i))
2652
            return 0;
2653
          break;
2654
 
2655
        case 'w':
2656
          if (XWINT (x, i) != XWINT (y, i))
2657
            return 0;
2658
          break;
2659
 
2660
        case '0':
2661
        case 't':
2662
          break;
2663
 
2664
        default:
2665
          gcc_unreachable ();
2666
        }
2667
    }
2668
 
2669
  return 1;
2670
}
2671
 
2672
/* Return 1 if X has a value that can vary even between two
2673
   executions of the program.  0 means X can be compared reliably
2674
   against certain constants or near-constants.  */
2675
 
2676
static int
2677
cse_rtx_varies_p (rtx x, int from_alias)
2678
{
2679
  /* We need not check for X and the equivalence class being of the same
2680
     mode because if X is equivalent to a constant in some mode, it
2681
     doesn't vary in any mode.  */
2682
 
2683
  if (REG_P (x)
2684
      && REGNO_QTY_VALID_P (REGNO (x)))
2685
    {
2686
      int x_q = REG_QTY (REGNO (x));
2687
      struct qty_table_elem *x_ent = &qty_table[x_q];
2688
 
2689
      if (GET_MODE (x) == x_ent->mode
2690
          && x_ent->const_rtx != NULL_RTX)
2691
        return 0;
2692
    }
2693
 
2694
  if (GET_CODE (x) == PLUS
2695
      && GET_CODE (XEXP (x, 1)) == CONST_INT
2696
      && REG_P (XEXP (x, 0))
2697
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2698
    {
2699
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2700
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2701
 
2702
      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2703
          && x0_ent->const_rtx != NULL_RTX)
2704
        return 0;
2705
    }
2706
 
2707
  /* This can happen as the result of virtual register instantiation, if
2708
     the initial constant is too large to be a valid address.  This gives
2709
     us a three instruction sequence, load large offset into a register,
2710
     load fp minus a constant into a register, then a MEM which is the
2711
     sum of the two `constant' registers.  */
2712
  if (GET_CODE (x) == PLUS
2713
      && REG_P (XEXP (x, 0))
2714
      && REG_P (XEXP (x, 1))
2715
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2716
      && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2717
    {
2718
      int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2719
      int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2720
      struct qty_table_elem *x0_ent = &qty_table[x0_q];
2721
      struct qty_table_elem *x1_ent = &qty_table[x1_q];
2722
 
2723
      if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2724
          && x0_ent->const_rtx != NULL_RTX
2725
          && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2726
          && x1_ent->const_rtx != NULL_RTX)
2727
        return 0;
2728
    }
2729
 
2730
  return rtx_varies_p (x, from_alias);
2731
}
2732
 
2733
/* Subroutine of canon_reg.  Pass *XLOC through canon_reg, and validate
2734
   the result if necessary.  INSN is as for canon_reg.  */
2735
 
2736
static void
2737
validate_canon_reg (rtx *xloc, rtx insn)
2738
{
2739
  rtx new = canon_reg (*xloc, insn);
2740
 
2741
  /* If replacing pseudo with hard reg or vice versa, ensure the
2742
     insn remains valid.  Likewise if the insn has MATCH_DUPs.  */
2743
  if (insn != 0 && new != 0)
2744
    validate_change (insn, xloc, new, 1);
2745
  else
2746
    *xloc = new;
2747
}
2748
 
2749
/* Canonicalize an expression:
2750
   replace each register reference inside it
2751
   with the "oldest" equivalent register.
2752
 
2753
   If INSN is nonzero validate_change is used to ensure that INSN remains valid
2754
   after we make our substitution.  The calls are made with IN_GROUP nonzero
2755
   so apply_change_group must be called upon the outermost return from this
2756
   function (unless INSN is zero).  The result of apply_change_group can
2757
   generally be discarded since the changes we are making are optional.  */
2758
 
2759
static rtx
2760
canon_reg (rtx x, rtx insn)
2761
{
2762
  int i;
2763
  enum rtx_code code;
2764
  const char *fmt;
2765
 
2766
  if (x == 0)
2767
    return x;
2768
 
2769
  code = GET_CODE (x);
2770
  switch (code)
2771
    {
2772
    case PC:
2773
    case CC0:
2774
    case CONST:
2775
    case CONST_INT:
2776
    case CONST_DOUBLE:
2777
    case CONST_VECTOR:
2778
    case SYMBOL_REF:
2779
    case LABEL_REF:
2780
    case ADDR_VEC:
2781
    case ADDR_DIFF_VEC:
2782
      return x;
2783
 
2784
    case REG:
2785
      {
2786
        int first;
2787
        int q;
2788
        struct qty_table_elem *ent;
2789
 
2790
        /* Never replace a hard reg, because hard regs can appear
2791
           in more than one machine mode, and we must preserve the mode
2792
           of each occurrence.  Also, some hard regs appear in
2793
           MEMs that are shared and mustn't be altered.  Don't try to
2794
           replace any reg that maps to a reg of class NO_REGS.  */
2795
        if (REGNO (x) < FIRST_PSEUDO_REGISTER
2796
            || ! REGNO_QTY_VALID_P (REGNO (x)))
2797
          return x;
2798
 
2799
        q = REG_QTY (REGNO (x));
2800
        ent = &qty_table[q];
2801
        first = ent->first_reg;
2802
        return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2803
                : REGNO_REG_CLASS (first) == NO_REGS ? x
2804
                : gen_rtx_REG (ent->mode, first));
2805
      }
2806
 
2807
    default:
2808
      break;
2809
    }
2810
 
2811
  fmt = GET_RTX_FORMAT (code);
2812
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2813
    {
2814
      int j;
2815
 
2816
      if (fmt[i] == 'e')
2817
        validate_canon_reg (&XEXP (x, i), insn);
2818
      else if (fmt[i] == 'E')
2819
        for (j = 0; j < XVECLEN (x, i); j++)
2820
          validate_canon_reg (&XVECEXP (x, i, j), insn);
2821
    }
2822
 
2823
  return x;
2824
}
2825
 
2826
/* LOC is a location within INSN that is an operand address (the contents of
2827
   a MEM).  Find the best equivalent address to use that is valid for this
2828
   insn.
2829
 
2830
   On most CISC machines, complicated address modes are costly, and rtx_cost
2831
   is a good approximation for that cost.  However, most RISC machines have
2832
   only a few (usually only one) memory reference formats.  If an address is
2833
   valid at all, it is often just as cheap as any other address.  Hence, for
2834
   RISC machines, we use `address_cost' to compare the costs of various
2835
   addresses.  For two addresses of equal cost, choose the one with the
2836
   highest `rtx_cost' value as that has the potential of eliminating the
2837
   most insns.  For equal costs, we choose the first in the equivalence
2838
   class.  Note that we ignore the fact that pseudo registers are cheaper than
2839
   hard registers here because we would also prefer the pseudo registers.  */
2840
 
2841
static void
2842
find_best_addr (rtx insn, rtx *loc, enum machine_mode mode)
2843
{
2844
  struct table_elt *elt;
2845
  rtx addr = *loc;
2846
  struct table_elt *p;
2847
  int found_better = 1;
2848
  int save_do_not_record = do_not_record;
2849
  int save_hash_arg_in_memory = hash_arg_in_memory;
2850
  int addr_volatile;
2851
  int regno;
2852
  unsigned hash;
2853
 
2854
  /* Do not try to replace constant addresses or addresses of local and
2855
     argument slots.  These MEM expressions are made only once and inserted
2856
     in many instructions, as well as being used to control symbol table
2857
     output.  It is not safe to clobber them.
2858
 
2859
     There are some uncommon cases where the address is already in a register
2860
     for some reason, but we cannot take advantage of that because we have
2861
     no easy way to unshare the MEM.  In addition, looking up all stack
2862
     addresses is costly.  */
2863
  if ((GET_CODE (addr) == PLUS
2864
       && REG_P (XEXP (addr, 0))
2865
       && GET_CODE (XEXP (addr, 1)) == CONST_INT
2866
       && (regno = REGNO (XEXP (addr, 0)),
2867
           regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2868
           || regno == ARG_POINTER_REGNUM))
2869
      || (REG_P (addr)
2870
          && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2871
              || regno == HARD_FRAME_POINTER_REGNUM
2872
              || regno == ARG_POINTER_REGNUM))
2873
      || CONSTANT_ADDRESS_P (addr))
2874
    return;
2875
 
2876
  /* If this address is not simply a register, try to fold it.  This will
2877
     sometimes simplify the expression.  Many simplifications
2878
     will not be valid, but some, usually applying the associative rule, will
2879
     be valid and produce better code.  */
2880
  if (!REG_P (addr))
2881
    {
2882
      rtx folded = canon_for_address (fold_rtx (addr, NULL_RTX));
2883
 
2884
      if (folded != addr)
2885
        {
2886
          int addr_folded_cost = address_cost (folded, mode);
2887
          int addr_cost = address_cost (addr, mode);
2888
 
2889
          if ((addr_folded_cost < addr_cost
2890
               || (addr_folded_cost == addr_cost
2891
                   /* ??? The rtx_cost comparison is left over from an older
2892
                      version of this code.  It is probably no longer helpful.*/
2893
                   && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2894
                       || approx_reg_cost (folded) < approx_reg_cost (addr))))
2895
              && validate_change (insn, loc, folded, 0))
2896
            addr = folded;
2897
        }
2898
    }
2899
 
2900
  /* If this address is not in the hash table, we can't look for equivalences
2901
     of the whole address.  Also, ignore if volatile.  */
2902
 
2903
  do_not_record = 0;
2904
  hash = HASH (addr, Pmode);
2905
  addr_volatile = do_not_record;
2906
  do_not_record = save_do_not_record;
2907
  hash_arg_in_memory = save_hash_arg_in_memory;
2908
 
2909
  if (addr_volatile)
2910
    return;
2911
 
2912
  elt = lookup (addr, hash, Pmode);
2913
 
2914
  if (elt)
2915
    {
2916
      /* We need to find the best (under the criteria documented above) entry
2917
         in the class that is valid.  We use the `flag' field to indicate
2918
         choices that were invalid and iterate until we can't find a better
2919
         one that hasn't already been tried.  */
2920
 
2921
      for (p = elt->first_same_value; p; p = p->next_same_value)
2922
        p->flag = 0;
2923
 
2924
      while (found_better)
2925
        {
2926
          int best_addr_cost = address_cost (*loc, mode);
2927
          int best_rtx_cost = (elt->cost + 1) >> 1;
2928
          int exp_cost;
2929
          struct table_elt *best_elt = elt;
2930
 
2931
          found_better = 0;
2932
          for (p = elt->first_same_value; p; p = p->next_same_value)
2933
            if (! p->flag)
2934
              {
2935
                if ((REG_P (p->exp)
2936
                     || exp_equiv_p (p->exp, p->exp, 1, false))
2937
                    && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
2938
                        || (exp_cost == best_addr_cost
2939
                            && ((p->cost + 1) >> 1) > best_rtx_cost)))
2940
                  {
2941
                    found_better = 1;
2942
                    best_addr_cost = exp_cost;
2943
                    best_rtx_cost = (p->cost + 1) >> 1;
2944
                    best_elt = p;
2945
                  }
2946
              }
2947
 
2948
          if (found_better)
2949
            {
2950
              if (validate_change (insn, loc,
2951
                                   canon_reg (copy_rtx (best_elt->exp),
2952
                                              NULL_RTX), 0))
2953
                return;
2954
              else
2955
                best_elt->flag = 1;
2956
            }
2957
        }
2958
    }
2959
 
2960
  /* If the address is a binary operation with the first operand a register
2961
     and the second a constant, do the same as above, but looking for
2962
     equivalences of the register.  Then try to simplify before checking for
2963
     the best address to use.  This catches a few cases:  First is when we
2964
     have REG+const and the register is another REG+const.  We can often merge
2965
     the constants and eliminate one insn and one register.  It may also be
2966
     that a machine has a cheap REG+REG+const.  Finally, this improves the
2967
     code on the Alpha for unaligned byte stores.  */
2968
 
2969
  if (flag_expensive_optimizations
2970
      && ARITHMETIC_P (*loc)
2971
      && REG_P (XEXP (*loc, 0)))
2972
    {
2973
      rtx op1 = XEXP (*loc, 1);
2974
 
2975
      do_not_record = 0;
2976
      hash = HASH (XEXP (*loc, 0), Pmode);
2977
      do_not_record = save_do_not_record;
2978
      hash_arg_in_memory = save_hash_arg_in_memory;
2979
 
2980
      elt = lookup (XEXP (*loc, 0), hash, Pmode);
2981
      if (elt == 0)
2982
        return;
2983
 
2984
      /* We need to find the best (under the criteria documented above) entry
2985
         in the class that is valid.  We use the `flag' field to indicate
2986
         choices that were invalid and iterate until we can't find a better
2987
         one that hasn't already been tried.  */
2988
 
2989
      for (p = elt->first_same_value; p; p = p->next_same_value)
2990
        p->flag = 0;
2991
 
2992
      while (found_better)
2993
        {
2994
          int best_addr_cost = address_cost (*loc, mode);
2995
          int best_rtx_cost = (COST (*loc) + 1) >> 1;
2996
          struct table_elt *best_elt = elt;
2997
          rtx best_rtx = *loc;
2998
          int count;
2999
 
3000
          /* This is at worst case an O(n^2) algorithm, so limit our search
3001
             to the first 32 elements on the list.  This avoids trouble
3002
             compiling code with very long basic blocks that can easily
3003
             call simplify_gen_binary so many times that we run out of
3004
             memory.  */
3005
 
3006
          found_better = 0;
3007
          for (p = elt->first_same_value, count = 0;
3008
               p && count < 32;
3009
               p = p->next_same_value, count++)
3010
            if (! p->flag
3011
                && (REG_P (p->exp)
3012
                    || (GET_CODE (p->exp) != EXPR_LIST
3013
                        && exp_equiv_p (p->exp, p->exp, 1, false))))
3014
 
3015
              {
3016
                rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3017
                                               p->exp, op1);
3018
                int new_cost;
3019
 
3020
                /* Get the canonical version of the address so we can accept
3021
                   more.  */
3022
                new = canon_for_address (new);
3023
 
3024
                new_cost = address_cost (new, mode);
3025
 
3026
                if (new_cost < best_addr_cost
3027
                    || (new_cost == best_addr_cost
3028
                        && (COST (new) + 1) >> 1 > best_rtx_cost))
3029
                  {
3030
                    found_better = 1;
3031
                    best_addr_cost = new_cost;
3032
                    best_rtx_cost = (COST (new) + 1) >> 1;
3033
                    best_elt = p;
3034
                    best_rtx = new;
3035
                  }
3036
              }
3037
 
3038
          if (found_better)
3039
            {
3040
              if (validate_change (insn, loc,
3041
                                   canon_reg (copy_rtx (best_rtx),
3042
                                              NULL_RTX), 0))
3043
                return;
3044
              else
3045
                best_elt->flag = 1;
3046
            }
3047
        }
3048
    }
3049
}
3050
 
3051
/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3052
   operation (EQ, NE, GT, etc.), follow it back through the hash table and
3053
   what values are being compared.
3054
 
3055
   *PARG1 and *PARG2 are updated to contain the rtx representing the values
3056
   actually being compared.  For example, if *PARG1 was (cc0) and *PARG2
3057
   was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3058
   compared to produce cc0.
3059
 
3060
   The return value is the comparison operator and is either the code of
3061
   A or the code corresponding to the inverse of the comparison.  */
3062
 
3063
static enum rtx_code
3064
find_comparison_args (enum rtx_code code, rtx *parg1, rtx *parg2,
3065
                      enum machine_mode *pmode1, enum machine_mode *pmode2)
3066
{
3067
  rtx arg1, arg2;
3068
 
3069
  arg1 = *parg1, arg2 = *parg2;
3070
 
3071
  /* If ARG2 is const0_rtx, see what ARG1 is equivalent to.  */
3072
 
3073
  while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3074
    {
3075
      /* Set nonzero when we find something of interest.  */
3076
      rtx x = 0;
3077
      int reverse_code = 0;
3078
      struct table_elt *p = 0;
3079
 
3080
      /* If arg1 is a COMPARE, extract the comparison arguments from it.
3081
         On machines with CC0, this is the only case that can occur, since
3082
         fold_rtx will return the COMPARE or item being compared with zero
3083
         when given CC0.  */
3084
 
3085
      if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3086
        x = arg1;
3087
 
3088
      /* If ARG1 is a comparison operator and CODE is testing for
3089
         STORE_FLAG_VALUE, get the inner arguments.  */
3090
 
3091
      else if (COMPARISON_P (arg1))
3092
        {
3093
#ifdef FLOAT_STORE_FLAG_VALUE
3094
          REAL_VALUE_TYPE fsfv;
3095
#endif
3096
 
3097
          if (code == NE
3098
              || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3099
                  && code == LT && STORE_FLAG_VALUE == -1)
3100
#ifdef FLOAT_STORE_FLAG_VALUE
3101
              || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3102
                  && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3103
                      REAL_VALUE_NEGATIVE (fsfv)))
3104
#endif
3105
              )
3106
            x = arg1;
3107
          else if (code == EQ
3108
                   || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3109
                       && code == GE && STORE_FLAG_VALUE == -1)
3110
#ifdef FLOAT_STORE_FLAG_VALUE
3111
                   || (SCALAR_FLOAT_MODE_P (GET_MODE (arg1))
3112
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3113
                           REAL_VALUE_NEGATIVE (fsfv)))
3114
#endif
3115
                   )
3116
            x = arg1, reverse_code = 1;
3117
        }
3118
 
3119
      /* ??? We could also check for
3120
 
3121
         (ne (and (eq (...) (const_int 1))) (const_int 0))
3122
 
3123
         and related forms, but let's wait until we see them occurring.  */
3124
 
3125
      if (x == 0)
3126
        /* Look up ARG1 in the hash table and see if it has an equivalence
3127
           that lets us see what is being compared.  */
3128
        p = lookup (arg1, SAFE_HASH (arg1, GET_MODE (arg1)), GET_MODE (arg1));
3129
      if (p)
3130
        {
3131
          p = p->first_same_value;
3132
 
3133
          /* If what we compare is already known to be constant, that is as
3134
             good as it gets.
3135
             We need to break the loop in this case, because otherwise we
3136
             can have an infinite loop when looking at a reg that is known
3137
             to be a constant which is the same as a comparison of a reg
3138
             against zero which appears later in the insn stream, which in
3139
             turn is constant and the same as the comparison of the first reg
3140
             against zero...  */
3141
          if (p->is_const)
3142
            break;
3143
        }
3144
 
3145
      for (; p; p = p->next_same_value)
3146
        {
3147
          enum machine_mode inner_mode = GET_MODE (p->exp);
3148
#ifdef FLOAT_STORE_FLAG_VALUE
3149
          REAL_VALUE_TYPE fsfv;
3150
#endif
3151
 
3152
          /* If the entry isn't valid, skip it.  */
3153
          if (! exp_equiv_p (p->exp, p->exp, 1, false))
3154
            continue;
3155
 
3156
          if (GET_CODE (p->exp) == COMPARE
3157
              /* Another possibility is that this machine has a compare insn
3158
                 that includes the comparison code.  In that case, ARG1 would
3159
                 be equivalent to a comparison operation that would set ARG1 to
3160
                 either STORE_FLAG_VALUE or zero.  If this is an NE operation,
3161
                 ORIG_CODE is the actual comparison being done; if it is an EQ,
3162
                 we must reverse ORIG_CODE.  On machine with a negative value
3163
                 for STORE_FLAG_VALUE, also look at LT and GE operations.  */
3164
              || ((code == NE
3165
                   || (code == LT
3166
                       && GET_MODE_CLASS (inner_mode) == MODE_INT
3167
                       && (GET_MODE_BITSIZE (inner_mode)
3168
                           <= HOST_BITS_PER_WIDE_INT)
3169
                       && (STORE_FLAG_VALUE
3170
                           & ((HOST_WIDE_INT) 1
3171
                              << (GET_MODE_BITSIZE (inner_mode) - 1))))
3172
#ifdef FLOAT_STORE_FLAG_VALUE
3173
                   || (code == LT
3174
                       && SCALAR_FLOAT_MODE_P (inner_mode)
3175
                       && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3176
                           REAL_VALUE_NEGATIVE (fsfv)))
3177
#endif
3178
                   )
3179
                  && COMPARISON_P (p->exp)))
3180
            {
3181
              x = p->exp;
3182
              break;
3183
            }
3184
          else if ((code == EQ
3185
                    || (code == GE
3186
                        && GET_MODE_CLASS (inner_mode) == MODE_INT
3187
                        && (GET_MODE_BITSIZE (inner_mode)
3188
                            <= HOST_BITS_PER_WIDE_INT)
3189
                        && (STORE_FLAG_VALUE
3190
                            & ((HOST_WIDE_INT) 1
3191
                               << (GET_MODE_BITSIZE (inner_mode) - 1))))
3192
#ifdef FLOAT_STORE_FLAG_VALUE
3193
                    || (code == GE
3194
                        && SCALAR_FLOAT_MODE_P (inner_mode)
3195
                        && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3196
                            REAL_VALUE_NEGATIVE (fsfv)))
3197
#endif
3198
                    )
3199
                   && COMPARISON_P (p->exp))
3200
            {
3201
              reverse_code = 1;
3202
              x = p->exp;
3203
              break;
3204
            }
3205
 
3206
          /* If this non-trapping address, e.g. fp + constant, the
3207
             equivalent is a better operand since it may let us predict
3208
             the value of the comparison.  */
3209
          else if (!rtx_addr_can_trap_p (p->exp))
3210
            {
3211
              arg1 = p->exp;
3212
              continue;
3213
            }
3214
        }
3215
 
3216
      /* If we didn't find a useful equivalence for ARG1, we are done.
3217
         Otherwise, set up for the next iteration.  */
3218
      if (x == 0)
3219
        break;
3220
 
3221
      /* If we need to reverse the comparison, make sure that that is
3222
         possible -- we can't necessarily infer the value of GE from LT
3223
         with floating-point operands.  */
3224
      if (reverse_code)
3225
        {
3226
          enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3227
          if (reversed == UNKNOWN)
3228
            break;
3229
          else
3230
            code = reversed;
3231
        }
3232
      else if (COMPARISON_P (x))
3233
        code = GET_CODE (x);
3234
      arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3235
    }
3236
 
3237
  /* Return our results.  Return the modes from before fold_rtx
3238
     because fold_rtx might produce const_int, and then it's too late.  */
3239
  *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3240
  *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3241
 
3242
  return code;
3243
}
3244
 
3245
/* Fold SUBREG.  */
3246
 
3247
static rtx
3248
fold_rtx_subreg (rtx x, rtx insn)
3249
{
3250
  enum machine_mode mode = GET_MODE (x);
3251
  rtx folded_arg0;
3252
  rtx const_arg0;
3253
  rtx new;
3254
 
3255
  /* See if we previously assigned a constant value to this SUBREG.  */
3256
  if ((new = lookup_as_function (x, CONST_INT)) != 0
3257
      || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3258
    return new;
3259
 
3260
  /* If this is a paradoxical SUBREG, we have no idea what value the
3261
     extra bits would have.  However, if the operand is equivalent to
3262
     a SUBREG whose operand is the same as our mode, and all the modes
3263
     are within a word, we can just use the inner operand because
3264
     these SUBREGs just say how to treat the register.
3265
 
3266
     Similarly if we find an integer constant.  */
3267
 
3268
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3269
    {
3270
      enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3271
      struct table_elt *elt;
3272
 
3273
      if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3274
          && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3275
          && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3276
                            imode)) != 0)
3277
        for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3278
          {
3279
            if (CONSTANT_P (elt->exp)
3280
                && GET_MODE (elt->exp) == VOIDmode)
3281
              return elt->exp;
3282
 
3283
            if (GET_CODE (elt->exp) == SUBREG
3284
                && GET_MODE (SUBREG_REG (elt->exp)) == mode
3285
                && exp_equiv_p (elt->exp, elt->exp, 1, false))
3286
              return copy_rtx (SUBREG_REG (elt->exp));
3287
          }
3288
 
3289
      return x;
3290
    }
3291
 
3292
  /* Fold SUBREG_REG.  If it changed, see if we can simplify the
3293
     SUBREG.  We might be able to if the SUBREG is extracting a single
3294
     word in an integral mode or extracting the low part.  */
3295
 
3296
  folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3297
  const_arg0 = equiv_constant (folded_arg0);
3298
  if (const_arg0)
3299
    folded_arg0 = const_arg0;
3300
 
3301
  if (folded_arg0 != SUBREG_REG (x))
3302
    {
3303
      new = simplify_subreg (mode, folded_arg0,
3304
                             GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3305
      if (new)
3306
        return new;
3307
    }
3308
 
3309
  if (REG_P (folded_arg0)
3310
      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0)))
3311
    {
3312
      struct table_elt *elt;
3313
 
3314
      elt = lookup (folded_arg0,
3315
                    HASH (folded_arg0, GET_MODE (folded_arg0)),
3316
                    GET_MODE (folded_arg0));
3317
 
3318
      if (elt)
3319
        elt = elt->first_same_value;
3320
 
3321
      if (subreg_lowpart_p (x))
3322
        /* If this is a narrowing SUBREG and our operand is a REG, see
3323
           if we can find an equivalence for REG that is an arithmetic
3324
           operation in a wider mode where both operands are
3325
           paradoxical SUBREGs from objects of our result mode.  In
3326
           that case, we couldn-t report an equivalent value for that
3327
           operation, since we don't know what the extra bits will be.
3328
           But we can find an equivalence for this SUBREG by folding
3329
           that operation in the narrow mode.  This allows us to fold
3330
           arithmetic in narrow modes when the machine only supports
3331
           word-sized arithmetic.
3332
 
3333
           Also look for a case where we have a SUBREG whose operand
3334
           is the same as our result.  If both modes are smaller than
3335
           a word, we are simply interpreting a register in different
3336
           modes and we can use the inner value.  */
3337
 
3338
        for (; elt; elt = elt->next_same_value)
3339
          {
3340
            enum rtx_code eltcode = GET_CODE (elt->exp);
3341
 
3342
            /* Just check for unary and binary operations.  */
3343
            if (UNARY_P (elt->exp)
3344
                && eltcode != SIGN_EXTEND
3345
                && eltcode != ZERO_EXTEND
3346
                && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3347
                && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3348
                && (GET_MODE_CLASS (mode)
3349
                    == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3350
              {
3351
                rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3352
 
3353
                if (!REG_P (op0) && ! CONSTANT_P (op0))
3354
                  op0 = fold_rtx (op0, NULL_RTX);
3355
 
3356
                op0 = equiv_constant (op0);
3357
                if (op0)
3358
                  new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3359
                                                  op0, mode);
3360
              }
3361
            else if (ARITHMETIC_P (elt->exp)
3362
                     && eltcode != DIV && eltcode != MOD
3363
                     && eltcode != UDIV && eltcode != UMOD
3364
                     && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3365
                     && eltcode != ROTATE && eltcode != ROTATERT
3366
                     && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3367
                          && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3368
                              == mode))
3369
                         || CONSTANT_P (XEXP (elt->exp, 0)))
3370
                     && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3371
                          && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3372
                              == mode))
3373
                         || CONSTANT_P (XEXP (elt->exp, 1))))
3374
              {
3375
                rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3376
                rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3377
 
3378
                if (op0 && !REG_P (op0) && ! CONSTANT_P (op0))
3379
                  op0 = fold_rtx (op0, NULL_RTX);
3380
 
3381
                if (op0)
3382
                  op0 = equiv_constant (op0);
3383
 
3384
                if (op1 && !REG_P (op1) && ! CONSTANT_P (op1))
3385
                  op1 = fold_rtx (op1, NULL_RTX);
3386
 
3387
                if (op1)
3388
                  op1 = equiv_constant (op1);
3389
 
3390
                /* If we are looking for the low SImode part of
3391
                   (ashift:DI c (const_int 32)), it doesn't work to
3392
                   compute that in SImode, because a 32-bit shift in
3393
                   SImode is unpredictable.  We know the value is
3394
                   0.  */
3395
                if (op0 && op1
3396
                    && GET_CODE (elt->exp) == ASHIFT
3397
                    && GET_CODE (op1) == CONST_INT
3398
                    && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3399
                  {
3400
                    if (INTVAL (op1)
3401
                        < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3402
                      /* If the count fits in the inner mode's width,
3403
                         but exceeds the outer mode's width, the value
3404
                         will get truncated to 0 by the subreg.  */
3405
                      new = CONST0_RTX (mode);
3406
                    else
3407
                      /* If the count exceeds even the inner mode's width,
3408
                         don't fold this expression.  */
3409
                      new = 0;
3410
                  }
3411
                else if (op0 && op1)
3412
                  new = simplify_binary_operation (GET_CODE (elt->exp),
3413
                                                   mode, op0, op1);
3414
              }
3415
 
3416
            else if (GET_CODE (elt->exp) == SUBREG
3417
                     && GET_MODE (SUBREG_REG (elt->exp)) == mode
3418
                     && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3419
                         <= UNITS_PER_WORD)
3420
                     && exp_equiv_p (elt->exp, elt->exp, 1, false))
3421
              new = copy_rtx (SUBREG_REG (elt->exp));
3422
 
3423
            if (new)
3424
              return new;
3425
          }
3426
      else
3427
        /* A SUBREG resulting from a zero extension may fold to zero
3428
           if it extracts higher bits than the ZERO_EXTEND's source
3429
           bits.  FIXME: if combine tried to, er, combine these
3430
           instructions, this transformation may be moved to
3431
           simplify_subreg.  */
3432
        for (; elt; elt = elt->next_same_value)
3433
          {
3434
            if (GET_CODE (elt->exp) == ZERO_EXTEND
3435
                && subreg_lsb (x)
3436
                >= GET_MODE_BITSIZE (GET_MODE (XEXP (elt->exp, 0))))
3437
              return CONST0_RTX (mode);
3438
          }
3439
    }
3440
 
3441
  return x;
3442
}
3443
 
3444
/* Fold MEM.  Not to be called directly, see fold_rtx_mem instead.  */
3445
 
3446
static rtx
3447
fold_rtx_mem_1 (rtx x, rtx insn)
3448
{
3449
  enum machine_mode mode = GET_MODE (x);
3450
  rtx new;
3451
 
3452
  /* If we are not actually processing an insn, don't try to find the
3453
     best address.  Not only don't we care, but we could modify the
3454
     MEM in an invalid way since we have no insn to validate
3455
     against.  */
3456
  if (insn != 0)
3457
    find_best_addr (insn, &XEXP (x, 0), mode);
3458
 
3459
  {
3460
    /* Even if we don't fold in the insn itself, we can safely do so
3461
       here, in hopes of getting a constant.  */
3462
    rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3463
    rtx base = 0;
3464
    HOST_WIDE_INT offset = 0;
3465
 
3466
    if (REG_P (addr)
3467
        && REGNO_QTY_VALID_P (REGNO (addr)))
3468
      {
3469
        int addr_q = REG_QTY (REGNO (addr));
3470
        struct qty_table_elem *addr_ent = &qty_table[addr_q];
3471
 
3472
        if (GET_MODE (addr) == addr_ent->mode
3473
            && addr_ent->const_rtx != NULL_RTX)
3474
          addr = addr_ent->const_rtx;
3475
      }
3476
 
3477
    /* Call target hook to avoid the effects of -fpic etc....  */
3478
    addr = targetm.delegitimize_address (addr);
3479
 
3480
    /* If address is constant, split it into a base and integer
3481
       offset.  */
3482
    if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3483
      base = addr;
3484
    else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3485
             && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3486
      {
3487
        base = XEXP (XEXP (addr, 0), 0);
3488
        offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3489
      }
3490
    else if (GET_CODE (addr) == LO_SUM
3491
             && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3492
      base = XEXP (addr, 1);
3493
 
3494
    /* If this is a constant pool reference, we can fold it into its
3495
       constant to allow better value tracking.  */
3496
    if (base && GET_CODE (base) == SYMBOL_REF
3497
        && CONSTANT_POOL_ADDRESS_P (base))
3498
      {
3499
        rtx constant = get_pool_constant (base);
3500
        enum machine_mode const_mode = get_pool_mode (base);
3501
        rtx new;
3502
 
3503
        if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3504
          {
3505
            constant_pool_entries_cost = COST (constant);
3506
            constant_pool_entries_regcost = approx_reg_cost (constant);
3507
          }
3508
 
3509
        /* If we are loading the full constant, we have an
3510
           equivalence.  */
3511
        if (offset == 0 && mode == const_mode)
3512
          return constant;
3513
 
3514
        /* If this actually isn't a constant (weird!), we can't do
3515
           anything.  Otherwise, handle the two most common cases:
3516
           extracting a word from a multi-word constant, and
3517
           extracting the low-order bits.  Other cases don't seem
3518
           common enough to worry about.  */
3519
        if (! CONSTANT_P (constant))
3520
          return x;
3521
 
3522
        if (GET_MODE_CLASS (mode) == MODE_INT
3523
            && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3524
            && offset % UNITS_PER_WORD == 0
3525
            && (new = operand_subword (constant,
3526
                                       offset / UNITS_PER_WORD,
3527
                                       0, const_mode)) != 0)
3528
          return new;
3529
 
3530
        if (((BYTES_BIG_ENDIAN
3531
              && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3532
             || (! BYTES_BIG_ENDIAN && offset == 0))
3533
            && (new = gen_lowpart (mode, constant)) != 0)
3534
          return new;
3535
      }
3536
 
3537
    /* If this is a reference to a label at a known position in a jump
3538
       table, we also know its value.  */
3539
    if (base && GET_CODE (base) == LABEL_REF)
3540
      {
3541
        rtx label = XEXP (base, 0);
3542
        rtx table_insn = NEXT_INSN (label);
3543
 
3544
        if (table_insn && JUMP_P (table_insn)
3545
            && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3546
          {
3547
            rtx table = PATTERN (table_insn);
3548
 
3549
            if (offset >= 0
3550
                && (offset / GET_MODE_SIZE (GET_MODE (table))
3551
                    < XVECLEN (table, 0)))
3552
              {
3553
                rtx label = XVECEXP
3554
                  (table, 0, offset / GET_MODE_SIZE (GET_MODE (table)));
3555
                rtx set;
3556
 
3557
                /* If we have an insn that loads the label from the
3558
                   jumptable into a reg, we don't want to set the reg
3559
                   to the label, because this may cause a reference to
3560
                   the label to remain after the label is removed in
3561
                   some very obscure cases (PR middle-end/18628).  */
3562
                if (!insn)
3563
                  return label;
3564
 
3565
                set = single_set (insn);
3566
 
3567
                if (! set || SET_SRC (set) != x)
3568
                  return x;
3569
 
3570
                /* If it's a jump, it's safe to reference the label.  */
3571
                if (SET_DEST (set) == pc_rtx)
3572
                  return label;
3573
 
3574
                return x;
3575
              }
3576
          }
3577
        if (table_insn && JUMP_P (table_insn)
3578
            && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3579
          {
3580
            rtx table = PATTERN (table_insn);
3581
 
3582
            if (offset >= 0
3583
                && (offset / GET_MODE_SIZE (GET_MODE (table))
3584
                    < XVECLEN (table, 1)))
3585
              {
3586
                offset /= GET_MODE_SIZE (GET_MODE (table));
3587
                new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3588
                                     XEXP (table, 0));
3589
 
3590
                if (GET_MODE (table) != Pmode)
3591
                  new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3592
 
3593
                /* Indicate this is a constant.  This isn't a valid
3594
                   form of CONST, but it will only be used to fold the
3595
                   next insns and then discarded, so it should be
3596
                   safe.
3597
 
3598
                   Note this expression must be explicitly discarded,
3599
                   by cse_insn, else it may end up in a REG_EQUAL note
3600
                   and "escape" to cause problems elsewhere.  */
3601
                return gen_rtx_CONST (GET_MODE (new), new);
3602
              }
3603
          }
3604
      }
3605
 
3606
    return x;
3607
  }
3608
}
3609
 
3610
/* Fold MEM.  */
3611
 
3612
static rtx
3613
fold_rtx_mem (rtx x, rtx insn)
3614
{
3615
  /* To avoid infinite oscillations between fold_rtx and fold_rtx_mem,
3616
     refuse to allow recursion of the latter past n levels.  This can
3617
     happen because fold_rtx_mem will try to fold the address of the
3618
     memory reference it is passed, i.e. conceptually throwing away
3619
     the MEM and reinjecting the bare address into fold_rtx.  As a
3620
     result, patterns like
3621
 
3622
       set (reg1)
3623
           (plus (reg)
3624
                 (mem (plus (reg2) (const_int))))
3625
 
3626
       set (reg2)
3627
           (plus (reg)
3628
                 (mem (plus (reg1) (const_int))))
3629
 
3630
     will defeat any "first-order" short-circuit put in either
3631
     function to prevent these infinite oscillations.
3632
 
3633
     The heuristics for determining n is as follows: since each time
3634
     it is invoked fold_rtx_mem throws away a MEM, and since MEMs
3635
     are generically not nested, we assume that each invocation of
3636
     fold_rtx_mem corresponds to a new "top-level" operand, i.e.
3637
     the source or the destination of a SET.  So fold_rtx_mem is
3638
     bound to stop or cycle before n recursions, n being the number
3639
     of expressions recorded in the hash table.  We also leave some
3640
     play to account for the initial steps.  */
3641
 
3642
  static unsigned int depth;
3643
  rtx ret;
3644
 
3645
  if (depth > 3 + table_size)
3646
    return x;
3647
 
3648
  depth++;
3649
  ret = fold_rtx_mem_1 (x, insn);
3650
  depth--;
3651
 
3652
  return ret;
3653
}
3654
 
3655
/* If X is a nontrivial arithmetic operation on an argument
3656
   for which a constant value can be determined, return
3657
   the result of operating on that value, as a constant.
3658
   Otherwise, return X, possibly with one or more operands
3659
   modified by recursive calls to this function.
3660
 
3661
   If X is a register whose contents are known, we do NOT
3662
   return those contents here.  equiv_constant is called to
3663
   perform that task.
3664
 
3665
   INSN is the insn that we may be modifying.  If it is 0, make a copy
3666
   of X before modifying it.  */
3667
 
3668
static rtx
3669
fold_rtx (rtx x, rtx insn)
3670
{
3671
  enum rtx_code code;
3672
  enum machine_mode mode;
3673
  const char *fmt;
3674
  int i;
3675
  rtx new = 0;
3676
  int copied = 0;
3677
  int must_swap = 0;
3678
 
3679
  /* Folded equivalents of first two operands of X.  */
3680
  rtx folded_arg0;
3681
  rtx folded_arg1;
3682
 
3683
  /* Constant equivalents of first three operands of X;
3684
 
3685
  rtx const_arg0;
3686
  rtx const_arg1;
3687
  rtx const_arg2;
3688
 
3689
  /* The mode of the first operand of X.  We need this for sign and zero
3690
     extends.  */
3691
  enum machine_mode mode_arg0;
3692
 
3693
  if (x == 0)
3694
    return x;
3695
 
3696
  mode = GET_MODE (x);
3697
  code = GET_CODE (x);
3698
  switch (code)
3699
    {
3700
    case CONST:
3701
    case CONST_INT:
3702
    case CONST_DOUBLE:
3703
    case CONST_VECTOR:
3704
    case SYMBOL_REF:
3705
    case LABEL_REF:
3706
    case REG:
3707
    case PC:
3708
      /* No use simplifying an EXPR_LIST
3709
         since they are used only for lists of args
3710
         in a function call's REG_EQUAL note.  */
3711
    case EXPR_LIST:
3712
      return x;
3713
 
3714
#ifdef HAVE_cc0
3715
    case CC0:
3716
      return prev_insn_cc0;
3717
#endif
3718
 
3719
    case SUBREG:
3720
      return fold_rtx_subreg (x, insn);
3721
 
3722
    case NOT:
3723
    case NEG:
3724
      /* If we have (NOT Y), see if Y is known to be (NOT Z).
3725
         If so, (NOT Y) simplifies to Z.  Similarly for NEG.  */
3726
      new = lookup_as_function (XEXP (x, 0), code);
3727
      if (new)
3728
        return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3729
      break;
3730
 
3731
    case MEM:
3732
      return fold_rtx_mem (x, insn);
3733
 
3734
#ifdef NO_FUNCTION_CSE
3735
    case CALL:
3736
      if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3737
        return x;
3738
      break;
3739
#endif
3740
 
3741
    case ASM_OPERANDS:
3742
      if (insn)
3743
        {
3744
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3745
            validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3746
                             fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3747
        }
3748
      break;
3749
 
3750
    default:
3751
      break;
3752
    }
3753
 
3754
  const_arg0 = 0;
3755
  const_arg1 = 0;
3756
  const_arg2 = 0;
3757
  mode_arg0 = VOIDmode;
3758
 
3759
  /* Try folding our operands.
3760
     Then see which ones have constant values known.  */
3761
 
3762
  fmt = GET_RTX_FORMAT (code);
3763
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3764
    if (fmt[i] == 'e')
3765
      {
3766
        rtx arg = XEXP (x, i);
3767
        rtx folded_arg = arg, const_arg = 0;
3768
        enum machine_mode mode_arg = GET_MODE (arg);
3769
        rtx cheap_arg, expensive_arg;
3770
        rtx replacements[2];
3771
        int j;
3772
        int old_cost = COST_IN (XEXP (x, i), code);
3773
 
3774
        /* Most arguments are cheap, so handle them specially.  */
3775
        switch (GET_CODE (arg))
3776
          {
3777
          case REG:
3778
            /* This is the same as calling equiv_constant; it is duplicated
3779
               here for speed.  */
3780
            if (REGNO_QTY_VALID_P (REGNO (arg)))
3781
              {
3782
                int arg_q = REG_QTY (REGNO (arg));
3783
                struct qty_table_elem *arg_ent = &qty_table[arg_q];
3784
 
3785
                if (arg_ent->const_rtx != NULL_RTX
3786
                    && !REG_P (arg_ent->const_rtx)
3787
                    && GET_CODE (arg_ent->const_rtx) != PLUS)
3788
                  const_arg
3789
                    = gen_lowpart (GET_MODE (arg),
3790
                                               arg_ent->const_rtx);
3791
              }
3792
            break;
3793
 
3794
          case CONST:
3795
          case CONST_INT:
3796
          case SYMBOL_REF:
3797
          case LABEL_REF:
3798
          case CONST_DOUBLE:
3799
          case CONST_VECTOR:
3800
            const_arg = arg;
3801
            break;
3802
 
3803
#ifdef HAVE_cc0
3804
          case CC0:
3805
            folded_arg = prev_insn_cc0;
3806
            mode_arg = prev_insn_cc0_mode;
3807
            const_arg = equiv_constant (folded_arg);
3808
            break;
3809
#endif
3810
 
3811
          default:
3812
            folded_arg = fold_rtx (arg, insn);
3813
            const_arg = equiv_constant (folded_arg);
3814
          }
3815
 
3816
        /* For the first three operands, see if the operand
3817
           is constant or equivalent to a constant.  */
3818
        switch (i)
3819
          {
3820
          case 0:
3821
            folded_arg0 = folded_arg;
3822
            const_arg0 = const_arg;
3823
            mode_arg0 = mode_arg;
3824
            break;
3825
          case 1:
3826
            folded_arg1 = folded_arg;
3827
            const_arg1 = const_arg;
3828
            break;
3829
          case 2:
3830
            const_arg2 = const_arg;
3831
            break;
3832
          }
3833
 
3834
        /* Pick the least expensive of the folded argument and an
3835
           equivalent constant argument.  */
3836
        if (const_arg == 0 || const_arg == folded_arg
3837
            || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3838
          cheap_arg = folded_arg, expensive_arg = const_arg;
3839
        else
3840
          cheap_arg = const_arg, expensive_arg = folded_arg;
3841
 
3842
        /* Try to replace the operand with the cheapest of the two
3843
           possibilities.  If it doesn't work and this is either of the first
3844
           two operands of a commutative operation, try swapping them.
3845
           If THAT fails, try the more expensive, provided it is cheaper
3846
           than what is already there.  */
3847
 
3848
        if (cheap_arg == XEXP (x, i))
3849
          continue;
3850
 
3851
        if (insn == 0 && ! copied)
3852
          {
3853
            x = copy_rtx (x);
3854
            copied = 1;
3855
          }
3856
 
3857
        /* Order the replacements from cheapest to most expensive.  */
3858
        replacements[0] = cheap_arg;
3859
        replacements[1] = expensive_arg;
3860
 
3861
        for (j = 0; j < 2 && replacements[j]; j++)
3862
          {
3863
            int new_cost = COST_IN (replacements[j], code);
3864
 
3865
            /* Stop if what existed before was cheaper.  Prefer constants
3866
               in the case of a tie.  */
3867
            if (new_cost > old_cost
3868
                || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3869
              break;
3870
 
3871
            /* It's not safe to substitute the operand of a conversion
3872
               operator with a constant, as the conversion's identity
3873
               depends upon the mode of its operand.  This optimization
3874
               is handled by the call to simplify_unary_operation.  */
3875
            if (GET_RTX_CLASS (code) == RTX_UNARY
3876
                && GET_MODE (replacements[j]) != mode_arg0
3877
                && (code == ZERO_EXTEND
3878
                    || code == SIGN_EXTEND
3879
                    || code == TRUNCATE
3880
                    || code == FLOAT_TRUNCATE
3881
                    || code == FLOAT_EXTEND
3882
                    || code == FLOAT
3883
                    || code == FIX
3884
                    || code == UNSIGNED_FLOAT
3885
                    || code == UNSIGNED_FIX))
3886
              continue;
3887
 
3888
            if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3889
              break;
3890
 
3891
            if (GET_RTX_CLASS (code) == RTX_COMM_COMPARE
3892
                || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
3893
              {
3894
                validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3895
                validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3896
 
3897
                if (apply_change_group ())
3898
                  {
3899
                    /* Swap them back to be invalid so that this loop can
3900
                       continue and flag them to be swapped back later.  */
3901
                    rtx tem;
3902
 
3903
                    tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3904
                                       XEXP (x, 1) = tem;
3905
                    must_swap = 1;
3906
                    break;
3907
                  }
3908
              }
3909
          }
3910
      }
3911
 
3912
    else
3913
      {
3914
        if (fmt[i] == 'E')
3915
          /* Don't try to fold inside of a vector of expressions.
3916
             Doing nothing is harmless.  */
3917
          {;}
3918
      }
3919
 
3920
  /* If a commutative operation, place a constant integer as the second
3921
     operand unless the first operand is also a constant integer.  Otherwise,
3922
     place any constant second unless the first operand is also a constant.  */
3923
 
3924
  if (COMMUTATIVE_P (x))
3925
    {
3926
      if (must_swap
3927
          || swap_commutative_operands_p (const_arg0 ? const_arg0
3928
                                                     : XEXP (x, 0),
3929
                                          const_arg1 ? const_arg1
3930
                                                     : XEXP (x, 1)))
3931
        {
3932
          rtx tem = XEXP (x, 0);
3933
 
3934
          if (insn == 0 && ! copied)
3935
            {
3936
              x = copy_rtx (x);
3937
              copied = 1;
3938
            }
3939
 
3940
          validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3941
          validate_change (insn, &XEXP (x, 1), tem, 1);
3942
          if (apply_change_group ())
3943
            {
3944
              tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3945
              tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3946
            }
3947
        }
3948
    }
3949
 
3950
  /* If X is an arithmetic operation, see if we can simplify it.  */
3951
 
3952
  switch (GET_RTX_CLASS (code))
3953
    {
3954
    case RTX_UNARY:
3955
      {
3956
        int is_const = 0;
3957
 
3958
        /* We can't simplify extension ops unless we know the
3959
           original mode.  */
3960
        if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3961
            && mode_arg0 == VOIDmode)
3962
          break;
3963
 
3964
        /* If we had a CONST, strip it off and put it back later if we
3965
           fold.  */
3966
        if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3967
          is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3968
 
3969
        new = simplify_unary_operation (code, mode,
3970
                                        const_arg0 ? const_arg0 : folded_arg0,
3971
                                        mode_arg0);
3972
        /* NEG of PLUS could be converted into MINUS, but that causes
3973
           expressions of the form
3974
           (CONST (MINUS (CONST_INT) (SYMBOL_REF)))
3975
           which many ports mistakenly treat as LEGITIMATE_CONSTANT_P.
3976
           FIXME: those ports should be fixed.  */
3977
        if (new != 0 && is_const
3978
            && GET_CODE (new) == PLUS
3979
            && (GET_CODE (XEXP (new, 0)) == SYMBOL_REF
3980
                || GET_CODE (XEXP (new, 0)) == LABEL_REF)
3981
            && GET_CODE (XEXP (new, 1)) == CONST_INT)
3982
          new = gen_rtx_CONST (mode, new);
3983
      }
3984
      break;
3985
 
3986
    case RTX_COMPARE:
3987
    case RTX_COMM_COMPARE:
3988
      /* See what items are actually being compared and set FOLDED_ARG[01]
3989
         to those values and CODE to the actual comparison code.  If any are
3990
         constant, set CONST_ARG0 and CONST_ARG1 appropriately.  We needn't
3991
         do anything if both operands are already known to be constant.  */
3992
 
3993
      /* ??? Vector mode comparisons are not supported yet.  */
3994
      if (VECTOR_MODE_P (mode))
3995
        break;
3996
 
3997
      if (const_arg0 == 0 || const_arg1 == 0)
3998
        {
3999
          struct table_elt *p0, *p1;
4000
          rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4001
          enum machine_mode mode_arg1;
4002
 
4003
#ifdef FLOAT_STORE_FLAG_VALUE
4004
          if (SCALAR_FLOAT_MODE_P (mode))
4005
            {
4006
              true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4007
                          (FLOAT_STORE_FLAG_VALUE (mode), mode));
4008
              false_rtx = CONST0_RTX (mode);
4009
            }
4010
#endif
4011
 
4012
          code = find_comparison_args (code, &folded_arg0, &folded_arg1,
4013
                                       &mode_arg0, &mode_arg1);
4014
 
4015
          /* If the mode is VOIDmode or a MODE_CC mode, we don't know
4016
             what kinds of things are being compared, so we can't do
4017
             anything with this comparison.  */
4018
 
4019
          if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
4020
            break;
4021
 
4022
          const_arg0 = equiv_constant (folded_arg0);
4023
          const_arg1 = equiv_constant (folded_arg1);
4024
 
4025
          /* If we do not now have two constants being compared, see
4026
             if we can nevertheless deduce some things about the
4027
             comparison.  */
4028
          if (const_arg0 == 0 || const_arg1 == 0)
4029
            {
4030
              if (const_arg1 != NULL)
4031
                {
4032
                  rtx cheapest_simplification;
4033
                  int cheapest_cost;
4034
                  rtx simp_result;
4035
                  struct table_elt *p;
4036
 
4037
                  /* See if we can find an equivalent of folded_arg0
4038
                     that gets us a cheaper expression, possibly a
4039
                     constant through simplifications.  */
4040
                  p = lookup (folded_arg0, SAFE_HASH (folded_arg0, mode_arg0),
4041
                              mode_arg0);
4042
 
4043
                  if (p != NULL)
4044
                    {
4045
                      cheapest_simplification = x;
4046
                      cheapest_cost = COST (x);
4047
 
4048
                      for (p = p->first_same_value; p != NULL; p = p->next_same_value)
4049
                        {
4050
                          int cost;
4051
 
4052
                          /* If the entry isn't valid, skip it.  */
4053
                          if (! exp_equiv_p (p->exp, p->exp, 1, false))
4054
                            continue;
4055
 
4056
                          /* Try to simplify using this equivalence.  */
4057
                          simp_result
4058
                            = simplify_relational_operation (code, mode,
4059
                                                             mode_arg0,
4060
                                                             p->exp,
4061
                                                             const_arg1);
4062
 
4063
                          if (simp_result == NULL)
4064
                            continue;
4065
 
4066
                          cost = COST (simp_result);
4067
                          if (cost < cheapest_cost)
4068
                            {
4069
                              cheapest_cost = cost;
4070
                              cheapest_simplification = simp_result;
4071
                            }
4072
                        }
4073
 
4074
                      /* If we have a cheaper expression now, use that
4075
                         and try folding it further, from the top.  */
4076
                      if (cheapest_simplification != x)
4077
                        return fold_rtx (cheapest_simplification, insn);
4078
                    }
4079
                }
4080
 
4081
              /* Some addresses are known to be nonzero.  We don't know
4082
                 their sign, but equality comparisons are known.  */
4083
              if (const_arg1 == const0_rtx
4084
                  && nonzero_address_p (folded_arg0))
4085
                {
4086
                  if (code == EQ)
4087
                    return false_rtx;
4088
                  else if (code == NE)
4089
                    return true_rtx;
4090
                }
4091
 
4092
              /* See if the two operands are the same.  */
4093
 
4094
              if (folded_arg0 == folded_arg1
4095
                  || (REG_P (folded_arg0)
4096
                      && REG_P (folded_arg1)
4097
                      && (REG_QTY (REGNO (folded_arg0))
4098
                          == REG_QTY (REGNO (folded_arg1))))
4099
                  || ((p0 = lookup (folded_arg0,
4100
                                    SAFE_HASH (folded_arg0, mode_arg0),
4101
                                    mode_arg0))
4102
                      && (p1 = lookup (folded_arg1,
4103
                                       SAFE_HASH (folded_arg1, mode_arg0),
4104
                                       mode_arg0))
4105
                      && p0->first_same_value == p1->first_same_value))
4106
                {
4107
                  /* Sadly two equal NaNs are not equivalent.  */
4108
                  if (!HONOR_NANS (mode_arg0))
4109
                    return ((code == EQ || code == LE || code == GE
4110
                             || code == LEU || code == GEU || code == UNEQ
4111
                             || code == UNLE || code == UNGE
4112
                             || code == ORDERED)
4113
                            ? true_rtx : false_rtx);
4114
                  /* Take care for the FP compares we can resolve.  */
4115
                  if (code == UNEQ || code == UNLE || code == UNGE)
4116
                    return true_rtx;
4117
                  if (code == LTGT || code == LT || code == GT)
4118
                    return false_rtx;
4119
                }
4120
 
4121
              /* If FOLDED_ARG0 is a register, see if the comparison we are
4122
                 doing now is either the same as we did before or the reverse
4123
                 (we only check the reverse if not floating-point).  */
4124
              else if (REG_P (folded_arg0))
4125
                {
4126
                  int qty = REG_QTY (REGNO (folded_arg0));
4127
 
4128
                  if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4129
                    {
4130
                      struct qty_table_elem *ent = &qty_table[qty];
4131
 
4132
                      if ((comparison_dominates_p (ent->comparison_code, code)
4133
                           || (! FLOAT_MODE_P (mode_arg0)
4134
                               && comparison_dominates_p (ent->comparison_code,
4135
                                                          reverse_condition (code))))
4136
                          && (rtx_equal_p (ent->comparison_const, folded_arg1)
4137
                              || (const_arg1
4138
                                  && rtx_equal_p (ent->comparison_const,
4139
                                                  const_arg1))
4140
                              || (REG_P (folded_arg1)
4141
                                  && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4142
                        return (comparison_dominates_p (ent->comparison_code, code)
4143
                                ? true_rtx : false_rtx);
4144
                    }
4145
                }
4146
            }
4147
        }
4148
 
4149
      /* If we are comparing against zero, see if the first operand is
4150
         equivalent to an IOR with a constant.  If so, we may be able to
4151
         determine the result of this comparison.  */
4152
 
4153
      if (const_arg1 == const0_rtx)
4154
        {
4155
          rtx y = lookup_as_function (folded_arg0, IOR);
4156
          rtx inner_const;
4157
 
4158
          if (y != 0
4159
              && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4160
              && GET_CODE (inner_const) == CONST_INT
4161
              && INTVAL (inner_const) != 0)
4162
            {
4163
              int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4164
              int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4165
                              && (INTVAL (inner_const)
4166
                                  & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4167
              rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4168
 
4169
#ifdef FLOAT_STORE_FLAG_VALUE
4170
              if (SCALAR_FLOAT_MODE_P (mode))
4171
                {
4172
                  true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4173
                          (FLOAT_STORE_FLAG_VALUE (mode), mode));
4174
                  false_rtx = CONST0_RTX (mode);
4175
                }
4176
#endif
4177
 
4178
              switch (code)
4179
                {
4180
                case EQ:
4181
                  return false_rtx;
4182
                case NE:
4183
                  return true_rtx;
4184
                case LT:  case LE:
4185
                  if (has_sign)
4186
                    return true_rtx;
4187
                  break;
4188
                case GT:  case GE:
4189
                  if (has_sign)
4190
                    return false_rtx;
4191
                  break;
4192
                default:
4193
                  break;
4194
                }
4195
            }
4196
        }
4197
 
4198
      {
4199
        rtx op0 = const_arg0 ? const_arg0 : folded_arg0;
4200
        rtx op1 = const_arg1 ? const_arg1 : folded_arg1;
4201
        new = simplify_relational_operation (code, mode, mode_arg0, op0, op1);
4202
      }
4203
      break;
4204
 
4205
    case RTX_BIN_ARITH:
4206
    case RTX_COMM_ARITH:
4207
      switch (code)
4208
        {
4209
        case PLUS:
4210
          /* If the second operand is a LABEL_REF, see if the first is a MINUS
4211
             with that LABEL_REF as its second operand.  If so, the result is
4212
             the first operand of that MINUS.  This handles switches with an
4213
             ADDR_DIFF_VEC table.  */
4214
          if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4215
            {
4216
              rtx y
4217
                = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4218
                : lookup_as_function (folded_arg0, MINUS);
4219
 
4220
              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4221
                  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4222
                return XEXP (y, 0);
4223
 
4224
              /* Now try for a CONST of a MINUS like the above.  */
4225
              if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4226
                        : lookup_as_function (folded_arg0, CONST))) != 0
4227
                  && GET_CODE (XEXP (y, 0)) == MINUS
4228
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4229
                  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4230
                return XEXP (XEXP (y, 0), 0);
4231
            }
4232
 
4233
          /* Likewise if the operands are in the other order.  */
4234
          if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4235
            {
4236
              rtx y
4237
                = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4238
                : lookup_as_function (folded_arg1, MINUS);
4239
 
4240
              if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4241
                  && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4242
                return XEXP (y, 0);
4243
 
4244
              /* Now try for a CONST of a MINUS like the above.  */
4245
              if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4246
                        : lookup_as_function (folded_arg1, CONST))) != 0
4247
                  && GET_CODE (XEXP (y, 0)) == MINUS
4248
                  && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4249
                  && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4250
                return XEXP (XEXP (y, 0), 0);
4251
            }
4252
 
4253
          /* If second operand is a register equivalent to a negative
4254
             CONST_INT, see if we can find a register equivalent to the
4255
             positive constant.  Make a MINUS if so.  Don't do this for
4256
             a non-negative constant since we might then alternate between
4257
             choosing positive and negative constants.  Having the positive
4258
             constant previously-used is the more common case.  Be sure
4259
             the resulting constant is non-negative; if const_arg1 were
4260
             the smallest negative number this would overflow: depending
4261
             on the mode, this would either just be the same value (and
4262
             hence not save anything) or be incorrect.  */
4263
          if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4264
              && INTVAL (const_arg1) < 0
4265
              /* This used to test
4266
 
4267
                 -INTVAL (const_arg1) >= 0
4268
 
4269
                 But The Sun V5.0 compilers mis-compiled that test.  So
4270
                 instead we test for the problematic value in a more direct
4271
                 manner and hope the Sun compilers get it correct.  */
4272
              && INTVAL (const_arg1) !=
4273
                ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4274
              && REG_P (folded_arg1))
4275
            {
4276
              rtx new_const = GEN_INT (-INTVAL (const_arg1));
4277
              struct table_elt *p
4278
                = lookup (new_const, SAFE_HASH (new_const, mode), mode);
4279
 
4280
              if (p)
4281
                for (p = p->first_same_value; p; p = p->next_same_value)
4282
                  if (REG_P (p->exp))
4283
                    return simplify_gen_binary (MINUS, mode, folded_arg0,
4284
                                                canon_reg (p->exp, NULL_RTX));
4285
            }
4286
          goto from_plus;
4287
 
4288
        case MINUS:
4289
          /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4290
             If so, produce (PLUS Z C2-C).  */
4291
          if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4292
            {
4293
              rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4294
              if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4295
                return fold_rtx (plus_constant (copy_rtx (y),
4296
                                                -INTVAL (const_arg1)),
4297
                                 NULL_RTX);
4298
            }
4299
 
4300
          /* Fall through.  */
4301
 
4302
        from_plus:
4303
        case SMIN:    case SMAX:      case UMIN:    case UMAX:
4304
        case IOR:     case AND:       case XOR:
4305
        case MULT:
4306
        case ASHIFT:  case LSHIFTRT:  case ASHIFTRT:
4307
          /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4308
             is known to be of similar form, we may be able to replace the
4309
             operation with a combined operation.  This may eliminate the
4310
             intermediate operation if every use is simplified in this way.
4311
             Note that the similar optimization done by combine.c only works
4312
             if the intermediate operation's result has only one reference.  */
4313
 
4314
          if (REG_P (folded_arg0)
4315
              && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4316
            {
4317
              int is_shift
4318
                = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4319
              rtx y, inner_const, new_const;
4320
              enum rtx_code associate_code;
4321
 
4322
              if (is_shift
4323
                  && (INTVAL (const_arg1) >= GET_MODE_BITSIZE (mode)
4324
                      || INTVAL (const_arg1) < 0))
4325
                {
4326
                  if (SHIFT_COUNT_TRUNCATED)
4327
                    const_arg1 = GEN_INT (INTVAL (const_arg1)
4328
                                          & (GET_MODE_BITSIZE (mode) - 1));
4329
                  else
4330
                    break;
4331
                }
4332
 
4333
              y = lookup_as_function (folded_arg0, code);
4334
              if (y == 0)
4335
                break;
4336
 
4337
              /* If we have compiled a statement like
4338
                 "if (x == (x & mask1))", and now are looking at
4339
                 "x & mask2", we will have a case where the first operand
4340
                 of Y is the same as our first operand.  Unless we detect
4341
                 this case, an infinite loop will result.  */
4342
              if (XEXP (y, 0) == folded_arg0)
4343
                break;
4344
 
4345
              inner_const = equiv_constant (fold_rtx (XEXP (y, 1), 0));
4346
              if (!inner_const || GET_CODE (inner_const) != CONST_INT)
4347
                break;
4348
 
4349
              /* Don't associate these operations if they are a PLUS with the
4350
                 same constant and it is a power of two.  These might be doable
4351
                 with a pre- or post-increment.  Similarly for two subtracts of
4352
                 identical powers of two with post decrement.  */
4353
 
4354
              if (code == PLUS && const_arg1 == inner_const
4355
                  && ((HAVE_PRE_INCREMENT
4356
                          && exact_log2 (INTVAL (const_arg1)) >= 0)
4357
                      || (HAVE_POST_INCREMENT
4358
                          && exact_log2 (INTVAL (const_arg1)) >= 0)
4359
                      || (HAVE_PRE_DECREMENT
4360
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)
4361
                      || (HAVE_POST_DECREMENT
4362
                          && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4363
                break;
4364
 
4365
              if (is_shift
4366
                  && (INTVAL (inner_const) >= GET_MODE_BITSIZE (mode)
4367
                      || INTVAL (inner_const) < 0))
4368
                {
4369
                  if (SHIFT_COUNT_TRUNCATED)
4370
                    inner_const = GEN_INT (INTVAL (inner_const)
4371
                                           & (GET_MODE_BITSIZE (mode) - 1));
4372
                  else
4373
                    break;
4374
                }
4375
 
4376
              /* Compute the code used to compose the constants.  For example,
4377
                 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS.  */
4378
 
4379
              associate_code = (is_shift || code == MINUS ? PLUS : code);
4380
 
4381
              new_const = simplify_binary_operation (associate_code, mode,
4382
                                                     const_arg1, inner_const);
4383
 
4384
              if (new_const == 0)
4385
                break;
4386
 
4387
              /* If we are associating shift operations, don't let this
4388
                 produce a shift of the size of the object or larger.
4389
                 This could occur when we follow a sign-extend by a right
4390
                 shift on a machine that does a sign-extend as a pair
4391
                 of shifts.  */
4392
 
4393
              if (is_shift
4394
                  && GET_CODE (new_const) == CONST_INT
4395
                  && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4396
                {
4397
                  /* As an exception, we can turn an ASHIFTRT of this
4398
                     form into a shift of the number of bits - 1.  */
4399
                  if (code == ASHIFTRT)
4400
                    new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4401
                  else if (!side_effects_p (XEXP (y, 0)))
4402
                    return CONST0_RTX (mode);
4403
                  else
4404
                    break;
4405
                }
4406
 
4407
              y = copy_rtx (XEXP (y, 0));
4408
 
4409
              /* If Y contains our first operand (the most common way this
4410
                 can happen is if Y is a MEM), we would do into an infinite
4411
                 loop if we tried to fold it.  So don't in that case.  */
4412
 
4413
              if (! reg_mentioned_p (folded_arg0, y))
4414
                y = fold_rtx (y, insn);
4415
 
4416
              return simplify_gen_binary (code, mode, y, new_const);
4417
            }
4418
          break;
4419
 
4420
        case DIV:       case UDIV:
4421
          /* ??? The associative optimization performed immediately above is
4422
             also possible for DIV and UDIV using associate_code of MULT.
4423
             However, we would need extra code to verify that the
4424
             multiplication does not overflow, that is, there is no overflow
4425
             in the calculation of new_const.  */
4426
          break;
4427
 
4428
        default:
4429
          break;
4430
        }
4431
 
4432
      new = simplify_binary_operation (code, mode,
4433
                                       const_arg0 ? const_arg0 : folded_arg0,
4434
                                       const_arg1 ? const_arg1 : folded_arg1);
4435
      break;
4436
 
4437
    case RTX_OBJ:
4438
      /* (lo_sum (high X) X) is simply X.  */
4439
      if (code == LO_SUM && const_arg0 != 0
4440
          && GET_CODE (const_arg0) == HIGH
4441
          && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4442
        return const_arg1;
4443
      break;
4444
 
4445
    case RTX_TERNARY:
4446
    case RTX_BITFIELD_OPS:
4447
      new = simplify_ternary_operation (code, mode, mode_arg0,
4448
                                        const_arg0 ? const_arg0 : folded_arg0,
4449
                                        const_arg1 ? const_arg1 : folded_arg1,
4450
                                        const_arg2 ? const_arg2 : XEXP (x, 2));
4451
      break;
4452
 
4453
    default:
4454
      break;
4455
    }
4456
 
4457
  return new ? new : x;
4458
}
4459
 
4460
/* Return a constant value currently equivalent to X.
4461
   Return 0 if we don't know one.  */
4462
 
4463
static rtx
4464
equiv_constant (rtx x)
4465
{
4466
  if (REG_P (x)
4467
      && REGNO_QTY_VALID_P (REGNO (x)))
4468
    {
4469
      int x_q = REG_QTY (REGNO (x));
4470
      struct qty_table_elem *x_ent = &qty_table[x_q];
4471
 
4472
      if (x_ent->const_rtx)
4473
        x = gen_lowpart (GET_MODE (x), x_ent->const_rtx);
4474
    }
4475
 
4476
  if (x == 0 || CONSTANT_P (x))
4477
    return x;
4478
 
4479
  /* If X is a MEM, try to fold it outside the context of any insn to see if
4480
     it might be equivalent to a constant.  That handles the case where it
4481
     is a constant-pool reference.  Then try to look it up in the hash table
4482
     in case it is something whose value we have seen before.  */
4483
 
4484
  if (MEM_P (x))
4485
    {
4486
      struct table_elt *elt;
4487
 
4488
      x = fold_rtx (x, NULL_RTX);
4489
      if (CONSTANT_P (x))
4490
        return x;
4491
 
4492
      elt = lookup (x, SAFE_HASH (x, GET_MODE (x)), GET_MODE (x));
4493
      if (elt == 0)
4494
        return 0;
4495
 
4496
      for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4497
        if (elt->is_const && CONSTANT_P (elt->exp))
4498
          return elt->exp;
4499
    }
4500
 
4501
  return 0;
4502
}
4503
 
4504
/* Given INSN, a jump insn, PATH_TAKEN indicates if we are following the "taken"
4505
   branch.  It will be zero if not.
4506
 
4507
   In certain cases, this can cause us to add an equivalence.  For example,
4508
   if we are following the taken case of
4509
        if (i == 2)
4510
   we can add the fact that `i' and '2' are now equivalent.
4511
 
4512
   In any case, we can record that this comparison was passed.  If the same
4513
   comparison is seen later, we will know its value.  */
4514
 
4515
static void
4516
record_jump_equiv (rtx insn, int taken)
4517
{
4518
  int cond_known_true;
4519
  rtx op0, op1;
4520
  rtx set;
4521
  enum machine_mode mode, mode0, mode1;
4522
  int reversed_nonequality = 0;
4523
  enum rtx_code code;
4524
 
4525
  /* Ensure this is the right kind of insn.  */
4526
  if (! any_condjump_p (insn))
4527
    return;
4528
  set = pc_set (insn);
4529
 
4530
  /* See if this jump condition is known true or false.  */
4531
  if (taken)
4532
    cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4533
  else
4534
    cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4535
 
4536
  /* Get the type of comparison being done and the operands being compared.
4537
     If we had to reverse a non-equality condition, record that fact so we
4538
     know that it isn't valid for floating-point.  */
4539
  code = GET_CODE (XEXP (SET_SRC (set), 0));
4540
  op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4541
  op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4542
 
4543
  code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4544
 
4545
  /* If the mode is a MODE_CC mode, we don't know what kinds of things
4546
     are being compared, so we can't do anything with this
4547
     comparison.  */
4548
 
4549
  if (GET_MODE_CLASS (mode0) == MODE_CC)
4550
    return;
4551
 
4552
  if (! cond_known_true)
4553
    {
4554
      code = reversed_comparison_code_parts (code, op0, op1, insn);
4555
 
4556
      /* Don't remember if we can't find the inverse.  */
4557
      if (code == UNKNOWN)
4558
        return;
4559
    }
4560
 
4561
  /* The mode is the mode of the non-constant.  */
4562
  mode = mode0;
4563
  if (mode1 != VOIDmode)
4564
    mode = mode1;
4565
 
4566
  record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4567
}
4568
 
4569
/* Yet another form of subreg creation.  In this case, we want something in
4570
   MODE, and we should assume OP has MODE iff it is naturally modeless.  */
4571
 
4572
static rtx
4573
record_jump_cond_subreg (enum machine_mode mode, rtx op)
4574
{
4575
  enum machine_mode op_mode = GET_MODE (op);
4576
  if (op_mode == mode || op_mode == VOIDmode)
4577
    return op;
4578
  return lowpart_subreg (mode, op, op_mode);
4579
}
4580
 
4581
/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4582
   REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4583
   Make any useful entries we can with that information.  Called from
4584
   above function and called recursively.  */
4585
 
4586
static void
4587
record_jump_cond (enum rtx_code code, enum machine_mode mode, rtx op0,
4588
                  rtx op1, int reversed_nonequality)
4589
{
4590
  unsigned op0_hash, op1_hash;
4591
  int op0_in_memory, op1_in_memory;
4592
  struct table_elt *op0_elt, *op1_elt;
4593
 
4594
  /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4595
     we know that they are also equal in the smaller mode (this is also
4596
     true for all smaller modes whether or not there is a SUBREG, but
4597
     is not worth testing for with no SUBREG).  */
4598
 
4599
  /* Note that GET_MODE (op0) may not equal MODE.  */
4600
  if (code == EQ && GET_CODE (op0) == SUBREG
4601
      && (GET_MODE_SIZE (GET_MODE (op0))
4602
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4603
    {
4604
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4605
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
4606
      if (tem)
4607
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4608
                          reversed_nonequality);
4609
    }
4610
 
4611
  if (code == EQ && GET_CODE (op1) == SUBREG
4612
      && (GET_MODE_SIZE (GET_MODE (op1))
4613
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4614
    {
4615
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4616
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
4617
      if (tem)
4618
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4619
                          reversed_nonequality);
4620
    }
4621
 
4622
  /* Similarly, if this is an NE comparison, and either is a SUBREG
4623
     making a smaller mode, we know the whole thing is also NE.  */
4624
 
4625
  /* Note that GET_MODE (op0) may not equal MODE;
4626
     if we test MODE instead, we can get an infinite recursion
4627
     alternating between two modes each wider than MODE.  */
4628
 
4629
  if (code == NE && GET_CODE (op0) == SUBREG
4630
      && subreg_lowpart_p (op0)
4631
      && (GET_MODE_SIZE (GET_MODE (op0))
4632
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4633
    {
4634
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4635
      rtx tem = record_jump_cond_subreg (inner_mode, op1);
4636
      if (tem)
4637
        record_jump_cond (code, mode, SUBREG_REG (op0), tem,
4638
                          reversed_nonequality);
4639
    }
4640
 
4641
  if (code == NE && GET_CODE (op1) == SUBREG
4642
      && subreg_lowpart_p (op1)
4643
      && (GET_MODE_SIZE (GET_MODE (op1))
4644
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4645
    {
4646
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4647
      rtx tem = record_jump_cond_subreg (inner_mode, op0);
4648
      if (tem)
4649
        record_jump_cond (code, mode, SUBREG_REG (op1), tem,
4650
                          reversed_nonequality);
4651
    }
4652
 
4653
  /* Hash both operands.  */
4654
 
4655
  do_not_record = 0;
4656
  hash_arg_in_memory = 0;
4657
  op0_hash = HASH (op0, mode);
4658
  op0_in_memory = hash_arg_in_memory;
4659
 
4660
  if (do_not_record)
4661
    return;
4662
 
4663
  do_not_record = 0;
4664
  hash_arg_in_memory = 0;
4665
  op1_hash = HASH (op1, mode);
4666
  op1_in_memory = hash_arg_in_memory;
4667
 
4668
  if (do_not_record)
4669
    return;
4670
 
4671
  /* Look up both operands.  */
4672
  op0_elt = lookup (op0, op0_hash, mode);
4673
  op1_elt = lookup (op1, op1_hash, mode);
4674
 
4675
  /* If both operands are already equivalent or if they are not in the
4676
     table but are identical, do nothing.  */
4677
  if ((op0_elt != 0 && op1_elt != 0
4678
       && op0_elt->first_same_value == op1_elt->first_same_value)
4679
      || op0 == op1 || rtx_equal_p (op0, op1))
4680
    return;
4681
 
4682
  /* If we aren't setting two things equal all we can do is save this
4683
     comparison.   Similarly if this is floating-point.  In the latter
4684
     case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4685
     If we record the equality, we might inadvertently delete code
4686
     whose intent was to change -0 to +0.  */
4687
 
4688
  if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4689
    {
4690
      struct qty_table_elem *ent;
4691
      int qty;
4692
 
4693
      /* If we reversed a floating-point comparison, if OP0 is not a
4694
         register, or if OP1 is neither a register or constant, we can't
4695
         do anything.  */
4696
 
4697
      if (!REG_P (op1))
4698
        op1 = equiv_constant (op1);
4699
 
4700
      if ((reversed_nonequality && FLOAT_MODE_P (mode))
4701
          || !REG_P (op0) || op1 == 0)
4702
        return;
4703
 
4704
      /* Put OP0 in the hash table if it isn't already.  This gives it a
4705
         new quantity number.  */
4706
      if (op0_elt == 0)
4707
        {
4708
          if (insert_regs (op0, NULL, 0))
4709
            {
4710
              rehash_using_reg (op0);
4711
              op0_hash = HASH (op0, mode);
4712
 
4713
              /* If OP0 is contained in OP1, this changes its hash code
4714
                 as well.  Faster to rehash than to check, except
4715
                 for the simple case of a constant.  */
4716
              if (! CONSTANT_P (op1))
4717
                op1_hash = HASH (op1,mode);
4718
            }
4719
 
4720
          op0_elt = insert (op0, NULL, op0_hash, mode);
4721
          op0_elt->in_memory = op0_in_memory;
4722
        }
4723
 
4724
      qty = REG_QTY (REGNO (op0));
4725
      ent = &qty_table[qty];
4726
 
4727
      ent->comparison_code = code;
4728
      if (REG_P (op1))
4729
        {
4730
          /* Look it up again--in case op0 and op1 are the same.  */
4731
          op1_elt = lookup (op1, op1_hash, mode);
4732
 
4733
          /* Put OP1 in the hash table so it gets a new quantity number.  */
4734
          if (op1_elt == 0)
4735
            {
4736
              if (insert_regs (op1, NULL, 0))
4737
                {
4738
                  rehash_using_reg (op1);
4739
                  op1_hash = HASH (op1, mode);
4740
                }
4741
 
4742
              op1_elt = insert (op1, NULL, op1_hash, mode);
4743
              op1_elt->in_memory = op1_in_memory;
4744
            }
4745
 
4746
          ent->comparison_const = NULL_RTX;
4747
          ent->comparison_qty = REG_QTY (REGNO (op1));
4748
        }
4749
      else
4750
        {
4751
          ent->comparison_const = op1;
4752
          ent->comparison_qty = -1;
4753
        }
4754
 
4755
      return;
4756
    }
4757
 
4758
  /* If either side is still missing an equivalence, make it now,
4759
     then merge the equivalences.  */
4760
 
4761
  if (op0_elt == 0)
4762
    {
4763
      if (insert_regs (op0, NULL, 0))
4764
        {
4765
          rehash_using_reg (op0);
4766
          op0_hash = HASH (op0, mode);
4767
        }
4768
 
4769
      op0_elt = insert (op0, NULL, op0_hash, mode);
4770
      op0_elt->in_memory = op0_in_memory;
4771
    }
4772
 
4773
  if (op1_elt == 0)
4774
    {
4775
      if (insert_regs (op1, NULL, 0))
4776
        {
4777
          rehash_using_reg (op1);
4778
          op1_hash = HASH (op1, mode);
4779
        }
4780
 
4781
      op1_elt = insert (op1, NULL, op1_hash, mode);
4782
      op1_elt->in_memory = op1_in_memory;
4783
    }
4784
 
4785
  merge_equiv_classes (op0_elt, op1_elt);
4786
}
4787
 
4788
/* CSE processing for one instruction.
4789
   First simplify sources and addresses of all assignments
4790
   in the instruction, using previously-computed equivalents values.
4791
   Then install the new sources and destinations in the table
4792
   of available values.
4793
 
4794
   If LIBCALL_INSN is nonzero, don't record any equivalence made in
4795
   the insn.  It means that INSN is inside libcall block.  In this
4796
   case LIBCALL_INSN is the corresponding insn with REG_LIBCALL.  */
4797
 
4798
/* Data on one SET contained in the instruction.  */
4799
 
4800
struct set
4801
{
4802
  /* The SET rtx itself.  */
4803
  rtx rtl;
4804
  /* The SET_SRC of the rtx (the original value, if it is changing).  */
4805
  rtx src;
4806
  /* The hash-table element for the SET_SRC of the SET.  */
4807
  struct table_elt *src_elt;
4808
  /* Hash value for the SET_SRC.  */
4809
  unsigned src_hash;
4810
  /* Hash value for the SET_DEST.  */
4811
  unsigned dest_hash;
4812
  /* The SET_DEST, with SUBREG, etc., stripped.  */
4813
  rtx inner_dest;
4814
  /* Nonzero if the SET_SRC is in memory.  */
4815
  char src_in_memory;
4816
  /* Nonzero if the SET_SRC contains something
4817
     whose value cannot be predicted and understood.  */
4818
  char src_volatile;
4819
  /* Original machine mode, in case it becomes a CONST_INT.
4820
     The size of this field should match the size of the mode
4821
     field of struct rtx_def (see rtl.h).  */
4822
  ENUM_BITFIELD(machine_mode) mode : 8;
4823
  /* A constant equivalent for SET_SRC, if any.  */
4824
  rtx src_const;
4825
  /* Original SET_SRC value used for libcall notes.  */
4826
  rtx orig_src;
4827
  /* Hash value of constant equivalent for SET_SRC.  */
4828
  unsigned src_const_hash;
4829
  /* Table entry for constant equivalent for SET_SRC, if any.  */
4830
  struct table_elt *src_const_elt;
4831
  /* Table entry for the destination address.  */
4832
  struct table_elt *dest_addr_elt;
4833
};
4834
 
4835
static void
4836
cse_insn (rtx insn, rtx libcall_insn)
4837
{
4838
  rtx x = PATTERN (insn);
4839
  int i;
4840
  rtx tem;
4841
  int n_sets = 0;
4842
 
4843
#ifdef HAVE_cc0
4844
  /* Records what this insn does to set CC0.  */
4845
  rtx this_insn_cc0 = 0;
4846
  enum machine_mode this_insn_cc0_mode = VOIDmode;
4847
#endif
4848
 
4849
  rtx src_eqv = 0;
4850
  struct table_elt *src_eqv_elt = 0;
4851
  int src_eqv_volatile = 0;
4852
  int src_eqv_in_memory = 0;
4853
  unsigned src_eqv_hash = 0;
4854
 
4855
  struct set *sets = (struct set *) 0;
4856
 
4857
  this_insn = insn;
4858
 
4859
  /* Find all the SETs and CLOBBERs in this instruction.
4860
     Record all the SETs in the array `set' and count them.
4861
     Also determine whether there is a CLOBBER that invalidates
4862
     all memory references, or all references at varying addresses.  */
4863
 
4864
  if (CALL_P (insn))
4865
    {
4866
      for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4867
        {
4868
          if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4869
            invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4870
          XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4871
        }
4872
    }
4873
 
4874
  if (GET_CODE (x) == SET)
4875
    {
4876
      sets = alloca (sizeof (struct set));
4877
      sets[0].rtl = x;
4878
 
4879
      /* Ignore SETs that are unconditional jumps.
4880
         They never need cse processing, so this does not hurt.
4881
         The reason is not efficiency but rather
4882
         so that we can test at the end for instructions
4883
         that have been simplified to unconditional jumps
4884
         and not be misled by unchanged instructions
4885
         that were unconditional jumps to begin with.  */
4886
      if (SET_DEST (x) == pc_rtx
4887
          && GET_CODE (SET_SRC (x)) == LABEL_REF)
4888
        ;
4889
 
4890
      /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4891
         The hard function value register is used only once, to copy to
4892
         someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4893
         Ensure we invalidate the destination register.  On the 80386 no
4894
         other code would invalidate it since it is a fixed_reg.
4895
         We need not check the return of apply_change_group; see canon_reg.  */
4896
 
4897
      else if (GET_CODE (SET_SRC (x)) == CALL)
4898
        {
4899
          canon_reg (SET_SRC (x), insn);
4900
          apply_change_group ();
4901
          fold_rtx (SET_SRC (x), insn);
4902
          invalidate (SET_DEST (x), VOIDmode);
4903
        }
4904
      else
4905
        n_sets = 1;
4906
    }
4907
  else if (GET_CODE (x) == PARALLEL)
4908
    {
4909
      int lim = XVECLEN (x, 0);
4910
 
4911
      sets = alloca (lim * sizeof (struct set));
4912
 
4913
      /* Find all regs explicitly clobbered in this insn,
4914
         and ensure they are not replaced with any other regs
4915
         elsewhere in this insn.
4916
         When a reg that is clobbered is also used for input,
4917
         we should presume that that is for a reason,
4918
         and we should not substitute some other register
4919
         which is not supposed to be clobbered.
4920
         Therefore, this loop cannot be merged into the one below
4921
         because a CALL may precede a CLOBBER and refer to the
4922
         value clobbered.  We must not let a canonicalization do
4923
         anything in that case.  */
4924
      for (i = 0; i < lim; i++)
4925
        {
4926
          rtx y = XVECEXP (x, 0, i);
4927
          if (GET_CODE (y) == CLOBBER)
4928
            {
4929
              rtx clobbered = XEXP (y, 0);
4930
 
4931
              if (REG_P (clobbered)
4932
                  || GET_CODE (clobbered) == SUBREG)
4933
                invalidate (clobbered, VOIDmode);
4934
              else if (GET_CODE (clobbered) == STRICT_LOW_PART
4935
                       || GET_CODE (clobbered) == ZERO_EXTRACT)
4936
                invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4937
            }
4938
        }
4939
 
4940
      for (i = 0; i < lim; i++)
4941
        {
4942
          rtx y = XVECEXP (x, 0, i);
4943
          if (GET_CODE (y) == SET)
4944
            {
4945
              /* As above, we ignore unconditional jumps and call-insns and
4946
                 ignore the result of apply_change_group.  */
4947
              if (GET_CODE (SET_SRC (y)) == CALL)
4948
                {
4949
                  canon_reg (SET_SRC (y), insn);
4950
                  apply_change_group ();
4951
                  fold_rtx (SET_SRC (y), insn);
4952
                  invalidate (SET_DEST (y), VOIDmode);
4953
                }
4954
              else if (SET_DEST (y) == pc_rtx
4955
                       && GET_CODE (SET_SRC (y)) == LABEL_REF)
4956
                ;
4957
              else
4958
                sets[n_sets++].rtl = y;
4959
            }
4960
          else if (GET_CODE (y) == CLOBBER)
4961
            {
4962
              /* If we clobber memory, canon the address.
4963
                 This does nothing when a register is clobbered
4964
                 because we have already invalidated the reg.  */
4965
              if (MEM_P (XEXP (y, 0)))
4966
                canon_reg (XEXP (y, 0), NULL_RTX);
4967
            }
4968
          else if (GET_CODE (y) == USE
4969
                   && ! (REG_P (XEXP (y, 0))
4970
                         && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4971
            canon_reg (y, NULL_RTX);
4972
          else if (GET_CODE (y) == CALL)
4973
            {
4974
              /* The result of apply_change_group can be ignored; see
4975
                 canon_reg.  */
4976
              canon_reg (y, insn);
4977
              apply_change_group ();
4978
              fold_rtx (y, insn);
4979
            }
4980
        }
4981
    }
4982
  else if (GET_CODE (x) == CLOBBER)
4983
    {
4984
      if (MEM_P (XEXP (x, 0)))
4985
        canon_reg (XEXP (x, 0), NULL_RTX);
4986
    }
4987
 
4988
  /* Canonicalize a USE of a pseudo register or memory location.  */
4989
  else if (GET_CODE (x) == USE
4990
           && ! (REG_P (XEXP (x, 0))
4991
                 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4992
    canon_reg (XEXP (x, 0), NULL_RTX);
4993
  else if (GET_CODE (x) == CALL)
4994
    {
4995
      /* The result of apply_change_group can be ignored; see canon_reg.  */
4996
      canon_reg (x, insn);
4997
      apply_change_group ();
4998
      fold_rtx (x, insn);
4999
    }
5000
 
5001
  /* Store the equivalent value in SRC_EQV, if different, or if the DEST
5002
     is a STRICT_LOW_PART.  The latter condition is necessary because SRC_EQV
5003
     is handled specially for this case, and if it isn't set, then there will
5004
     be no equivalence for the destination.  */
5005
  if (n_sets == 1 && REG_NOTES (insn) != 0
5006
      && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
5007
      && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
5008
          || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
5009
    {
5010
      src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
5011
      XEXP (tem, 0) = src_eqv;
5012
    }
5013
 
5014
  /* Canonicalize sources and addresses of destinations.
5015
     We do this in a separate pass to avoid problems when a MATCH_DUP is
5016
     present in the insn pattern.  In that case, we want to ensure that
5017
     we don't break the duplicate nature of the pattern.  So we will replace
5018
     both operands at the same time.  Otherwise, we would fail to find an
5019
     equivalent substitution in the loop calling validate_change below.
5020
 
5021
     We used to suppress canonicalization of DEST if it appears in SRC,
5022
     but we don't do this any more.  */
5023
 
5024
  for (i = 0; i < n_sets; i++)
5025
    {
5026
      rtx dest = SET_DEST (sets[i].rtl);
5027
      rtx src = SET_SRC (sets[i].rtl);
5028
      rtx new = canon_reg (src, insn);
5029
 
5030
      sets[i].orig_src = src;
5031
      validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5032
 
5033
      if (GET_CODE (dest) == ZERO_EXTRACT)
5034
        {
5035
          validate_change (insn, &XEXP (dest, 1),
5036
                           canon_reg (XEXP (dest, 1), insn), 1);
5037
          validate_change (insn, &XEXP (dest, 2),
5038
                           canon_reg (XEXP (dest, 2), insn), 1);
5039
        }
5040
 
5041
      while (GET_CODE (dest) == SUBREG
5042
             || GET_CODE (dest) == ZERO_EXTRACT
5043
             || GET_CODE (dest) == STRICT_LOW_PART)
5044
        dest = XEXP (dest, 0);
5045
 
5046
      if (MEM_P (dest))
5047
        canon_reg (dest, insn);
5048
    }
5049
 
5050
  /* Now that we have done all the replacements, we can apply the change
5051
     group and see if they all work.  Note that this will cause some
5052
     canonicalizations that would have worked individually not to be applied
5053
     because some other canonicalization didn't work, but this should not
5054
     occur often.
5055
 
5056
     The result of apply_change_group can be ignored; see canon_reg.  */
5057
 
5058
  apply_change_group ();
5059
 
5060
  /* Set sets[i].src_elt to the class each source belongs to.
5061
     Detect assignments from or to volatile things
5062
     and set set[i] to zero so they will be ignored
5063
     in the rest of this function.
5064
 
5065
     Nothing in this loop changes the hash table or the register chains.  */
5066
 
5067
  for (i = 0; i < n_sets; i++)
5068
    {
5069
      rtx src, dest;
5070
      rtx src_folded;
5071
      struct table_elt *elt = 0, *p;
5072
      enum machine_mode mode;
5073
      rtx src_eqv_here;
5074
      rtx src_const = 0;
5075
      rtx src_related = 0;
5076
      struct table_elt *src_const_elt = 0;
5077
      int src_cost = MAX_COST;
5078
      int src_eqv_cost = MAX_COST;
5079
      int src_folded_cost = MAX_COST;
5080
      int src_related_cost = MAX_COST;
5081
      int src_elt_cost = MAX_COST;
5082
      int src_regcost = MAX_COST;
5083
      int src_eqv_regcost = MAX_COST;
5084
      int src_folded_regcost = MAX_COST;
5085
      int src_related_regcost = MAX_COST;
5086
      int src_elt_regcost = MAX_COST;
5087
      /* Set nonzero if we need to call force_const_mem on with the
5088
         contents of src_folded before using it.  */
5089
      int src_folded_force_flag = 0;
5090
 
5091
      dest = SET_DEST (sets[i].rtl);
5092
      src = SET_SRC (sets[i].rtl);
5093
 
5094
      /* If SRC is a constant that has no machine mode,
5095
         hash it with the destination's machine mode.
5096
         This way we can keep different modes separate.  */
5097
 
5098
      mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5099
      sets[i].mode = mode;
5100
 
5101
      if (src_eqv)
5102
        {
5103
          enum machine_mode eqvmode = mode;
5104
          if (GET_CODE (dest) == STRICT_LOW_PART)
5105
            eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5106
          do_not_record = 0;
5107
          hash_arg_in_memory = 0;
5108
          src_eqv_hash = HASH (src_eqv, eqvmode);
5109
 
5110
          /* Find the equivalence class for the equivalent expression.  */
5111
 
5112
          if (!do_not_record)
5113
            src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5114
 
5115
          src_eqv_volatile = do_not_record;
5116
          src_eqv_in_memory = hash_arg_in_memory;
5117
        }
5118
 
5119
      /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5120
         value of the INNER register, not the destination.  So it is not
5121
         a valid substitution for the source.  But save it for later.  */
5122
      if (GET_CODE (dest) == STRICT_LOW_PART)
5123
        src_eqv_here = 0;
5124
      else
5125
        src_eqv_here = src_eqv;
5126
 
5127
      /* Simplify and foldable subexpressions in SRC.  Then get the fully-
5128
         simplified result, which may not necessarily be valid.  */
5129
      src_folded = fold_rtx (src, insn);
5130
 
5131
#if 0
5132
      /* ??? This caused bad code to be generated for the m68k port with -O2.
5133
         Suppose src is (CONST_INT -1), and that after truncation src_folded
5134
         is (CONST_INT 3).  Suppose src_folded is then used for src_const.
5135
         At the end we will add src and src_const to the same equivalence
5136
         class.  We now have 3 and -1 on the same equivalence class.  This
5137
         causes later instructions to be mis-optimized.  */
5138
      /* If storing a constant in a bitfield, pre-truncate the constant
5139
         so we will be able to record it later.  */
5140
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5141
        {
5142
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5143
 
5144
          if (GET_CODE (src) == CONST_INT
5145
              && GET_CODE (width) == CONST_INT
5146
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5147
              && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5148
            src_folded
5149
              = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5150
                                          << INTVAL (width)) - 1));
5151
        }
5152
#endif
5153
 
5154
      /* Compute SRC's hash code, and also notice if it
5155
         should not be recorded at all.  In that case,
5156
         prevent any further processing of this assignment.  */
5157
      do_not_record = 0;
5158
      hash_arg_in_memory = 0;
5159
 
5160
      sets[i].src = src;
5161
      sets[i].src_hash = HASH (src, mode);
5162
      sets[i].src_volatile = do_not_record;
5163
      sets[i].src_in_memory = hash_arg_in_memory;
5164
 
5165
      /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5166
         a pseudo, do not record SRC.  Using SRC as a replacement for
5167
         anything else will be incorrect in that situation.  Note that
5168
         this usually occurs only for stack slots, in which case all the
5169
         RTL would be referring to SRC, so we don't lose any optimization
5170
         opportunities by not having SRC in the hash table.  */
5171
 
5172
      if (MEM_P (src)
5173
          && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5174
          && REG_P (dest)
5175
          && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5176
        sets[i].src_volatile = 1;
5177
 
5178
#if 0
5179
      /* It is no longer clear why we used to do this, but it doesn't
5180
         appear to still be needed.  So let's try without it since this
5181
         code hurts cse'ing widened ops.  */
5182
      /* If source is a paradoxical subreg (such as QI treated as an SI),
5183
         treat it as volatile.  It may do the work of an SI in one context
5184
         where the extra bits are not being used, but cannot replace an SI
5185
         in general.  */
5186
      if (GET_CODE (src) == SUBREG
5187
          && (GET_MODE_SIZE (GET_MODE (src))
5188
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5189
        sets[i].src_volatile = 1;
5190
#endif
5191
 
5192
      /* Locate all possible equivalent forms for SRC.  Try to replace
5193
         SRC in the insn with each cheaper equivalent.
5194
 
5195
         We have the following types of equivalents: SRC itself, a folded
5196
         version, a value given in a REG_EQUAL note, or a value related
5197
         to a constant.
5198
 
5199
         Each of these equivalents may be part of an additional class
5200
         of equivalents (if more than one is in the table, they must be in
5201
         the same class; we check for this).
5202
 
5203
         If the source is volatile, we don't do any table lookups.
5204
 
5205
         We note any constant equivalent for possible later use in a
5206
         REG_NOTE.  */
5207
 
5208
      if (!sets[i].src_volatile)
5209
        elt = lookup (src, sets[i].src_hash, mode);
5210
 
5211
      sets[i].src_elt = elt;
5212
 
5213
      if (elt && src_eqv_here && src_eqv_elt)
5214
        {
5215
          if (elt->first_same_value != src_eqv_elt->first_same_value)
5216
            {
5217
              /* The REG_EQUAL is indicating that two formerly distinct
5218
                 classes are now equivalent.  So merge them.  */
5219
              merge_equiv_classes (elt, src_eqv_elt);
5220
              src_eqv_hash = HASH (src_eqv, elt->mode);
5221
              src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5222
            }
5223
 
5224
          src_eqv_here = 0;
5225
        }
5226
 
5227
      else if (src_eqv_elt)
5228
        elt = src_eqv_elt;
5229
 
5230
      /* Try to find a constant somewhere and record it in `src_const'.
5231
         Record its table element, if any, in `src_const_elt'.  Look in
5232
         any known equivalences first.  (If the constant is not in the
5233
         table, also set `sets[i].src_const_hash').  */
5234
      if (elt)
5235
        for (p = elt->first_same_value; p; p = p->next_same_value)
5236
          if (p->is_const)
5237
            {
5238
              src_const = p->exp;
5239
              src_const_elt = elt;
5240
              break;
5241
            }
5242
 
5243
      if (src_const == 0
5244
          && (CONSTANT_P (src_folded)
5245
              /* Consider (minus (label_ref L1) (label_ref L2)) as
5246
                 "constant" here so we will record it. This allows us
5247
                 to fold switch statements when an ADDR_DIFF_VEC is used.  */
5248
              || (GET_CODE (src_folded) == MINUS
5249
                  && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5250
                  && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5251
        src_const = src_folded, src_const_elt = elt;
5252
      else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5253
        src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5254
 
5255
      /* If we don't know if the constant is in the table, get its
5256
         hash code and look it up.  */
5257
      if (src_const && src_const_elt == 0)
5258
        {
5259
          sets[i].src_const_hash = HASH (src_const, mode);
5260
          src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5261
        }
5262
 
5263
      sets[i].src_const = src_const;
5264
      sets[i].src_const_elt = src_const_elt;
5265
 
5266
      /* If the constant and our source are both in the table, mark them as
5267
         equivalent.  Otherwise, if a constant is in the table but the source
5268
         isn't, set ELT to it.  */
5269
      if (src_const_elt && elt
5270
          && src_const_elt->first_same_value != elt->first_same_value)
5271
        merge_equiv_classes (elt, src_const_elt);
5272
      else if (src_const_elt && elt == 0)
5273
        elt = src_const_elt;
5274
 
5275
      /* See if there is a register linearly related to a constant
5276
         equivalent of SRC.  */
5277
      if (src_const
5278
          && (GET_CODE (src_const) == CONST
5279
              || (src_const_elt && src_const_elt->related_value != 0)))
5280
        {
5281
          src_related = use_related_value (src_const, src_const_elt);
5282
          if (src_related)
5283
            {
5284
              struct table_elt *src_related_elt
5285
                = lookup (src_related, HASH (src_related, mode), mode);
5286
              if (src_related_elt && elt)
5287
                {
5288
                  if (elt->first_same_value
5289
                      != src_related_elt->first_same_value)
5290
                    /* This can occur when we previously saw a CONST
5291
                       involving a SYMBOL_REF and then see the SYMBOL_REF
5292
                       twice.  Merge the involved classes.  */
5293
                    merge_equiv_classes (elt, src_related_elt);
5294
 
5295
                  src_related = 0;
5296
                  src_related_elt = 0;
5297
                }
5298
              else if (src_related_elt && elt == 0)
5299
                elt = src_related_elt;
5300
            }
5301
        }
5302
 
5303
      /* See if we have a CONST_INT that is already in a register in a
5304
         wider mode.  */
5305
 
5306
      if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5307
          && GET_MODE_CLASS (mode) == MODE_INT
5308
          && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5309
        {
5310
          enum machine_mode wider_mode;
5311
 
5312
          for (wider_mode = GET_MODE_WIDER_MODE (mode);
5313
               GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5314
               && src_related == 0;
5315
               wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5316
            {
5317
              struct table_elt *const_elt
5318
                = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5319
 
5320
              if (const_elt == 0)
5321
                continue;
5322
 
5323
              for (const_elt = const_elt->first_same_value;
5324
                   const_elt; const_elt = const_elt->next_same_value)
5325
                if (REG_P (const_elt->exp))
5326
                  {
5327
                    src_related = gen_lowpart (mode,
5328
                                                           const_elt->exp);
5329
                    break;
5330
                  }
5331
            }
5332
        }
5333
 
5334
      /* Another possibility is that we have an AND with a constant in
5335
         a mode narrower than a word.  If so, it might have been generated
5336
         as part of an "if" which would narrow the AND.  If we already
5337
         have done the AND in a wider mode, we can use a SUBREG of that
5338
         value.  */
5339
 
5340
      if (flag_expensive_optimizations && ! src_related
5341
          && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5342
          && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5343
        {
5344
          enum machine_mode tmode;
5345
          rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5346
 
5347
          for (tmode = GET_MODE_WIDER_MODE (mode);
5348
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5349
               tmode = GET_MODE_WIDER_MODE (tmode))
5350
            {
5351
              rtx inner = gen_lowpart (tmode, XEXP (src, 0));
5352
              struct table_elt *larger_elt;
5353
 
5354
              if (inner)
5355
                {
5356
                  PUT_MODE (new_and, tmode);
5357
                  XEXP (new_and, 0) = inner;
5358
                  larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5359
                  if (larger_elt == 0)
5360
                    continue;
5361
 
5362
                  for (larger_elt = larger_elt->first_same_value;
5363
                       larger_elt; larger_elt = larger_elt->next_same_value)
5364
                    if (REG_P (larger_elt->exp))
5365
                      {
5366
                        src_related
5367
                          = gen_lowpart (mode, larger_elt->exp);
5368
                        break;
5369
                      }
5370
 
5371
                  if (src_related)
5372
                    break;
5373
                }
5374
            }
5375
        }
5376
 
5377
#ifdef LOAD_EXTEND_OP
5378
      /* See if a MEM has already been loaded with a widening operation;
5379
         if it has, we can use a subreg of that.  Many CISC machines
5380
         also have such operations, but this is only likely to be
5381
         beneficial on these machines.  */
5382
 
5383
      if (flag_expensive_optimizations && src_related == 0
5384
          && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5385
          && GET_MODE_CLASS (mode) == MODE_INT
5386
          && MEM_P (src) && ! do_not_record
5387
          && LOAD_EXTEND_OP (mode) != UNKNOWN)
5388
        {
5389
          struct rtx_def memory_extend_buf;
5390
          rtx memory_extend_rtx = &memory_extend_buf;
5391
          enum machine_mode tmode;
5392
 
5393
          /* Set what we are trying to extend and the operation it might
5394
             have been extended with.  */
5395
          memset (memory_extend_rtx, 0, sizeof(*memory_extend_rtx));
5396
          PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5397
          XEXP (memory_extend_rtx, 0) = src;
5398
 
5399
          for (tmode = GET_MODE_WIDER_MODE (mode);
5400
               GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5401
               tmode = GET_MODE_WIDER_MODE (tmode))
5402
            {
5403
              struct table_elt *larger_elt;
5404
 
5405
              PUT_MODE (memory_extend_rtx, tmode);
5406
              larger_elt = lookup (memory_extend_rtx,
5407
                                   HASH (memory_extend_rtx, tmode), tmode);
5408
              if (larger_elt == 0)
5409
                continue;
5410
 
5411
              for (larger_elt = larger_elt->first_same_value;
5412
                   larger_elt; larger_elt = larger_elt->next_same_value)
5413
                if (REG_P (larger_elt->exp))
5414
                  {
5415
                    src_related = gen_lowpart (mode,
5416
                                                           larger_elt->exp);
5417
                    break;
5418
                  }
5419
 
5420
              if (src_related)
5421
                break;
5422
            }
5423
        }
5424
#endif /* LOAD_EXTEND_OP */
5425
 
5426
      if (src == src_folded)
5427
        src_folded = 0;
5428
 
5429
      /* At this point, ELT, if nonzero, points to a class of expressions
5430
         equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5431
         and SRC_RELATED, if nonzero, each contain additional equivalent
5432
         expressions.  Prune these latter expressions by deleting expressions
5433
         already in the equivalence class.
5434
 
5435
         Check for an equivalent identical to the destination.  If found,
5436
         this is the preferred equivalent since it will likely lead to
5437
         elimination of the insn.  Indicate this by placing it in
5438
         `src_related'.  */
5439
 
5440
      if (elt)
5441
        elt = elt->first_same_value;
5442
      for (p = elt; p; p = p->next_same_value)
5443
        {
5444
          enum rtx_code code = GET_CODE (p->exp);
5445
 
5446
          /* If the expression is not valid, ignore it.  Then we do not
5447
             have to check for validity below.  In most cases, we can use
5448
             `rtx_equal_p', since canonicalization has already been done.  */
5449
          if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, false))
5450
            continue;
5451
 
5452
          /* Also skip paradoxical subregs, unless that's what we're
5453
             looking for.  */
5454
          if (code == SUBREG
5455
              && (GET_MODE_SIZE (GET_MODE (p->exp))
5456
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5457
              && ! (src != 0
5458
                    && GET_CODE (src) == SUBREG
5459
                    && GET_MODE (src) == GET_MODE (p->exp)
5460
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5461
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5462
            continue;
5463
 
5464
          if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5465
            src = 0;
5466
          else if (src_folded && GET_CODE (src_folded) == code
5467
                   && rtx_equal_p (src_folded, p->exp))
5468
            src_folded = 0;
5469
          else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5470
                   && rtx_equal_p (src_eqv_here, p->exp))
5471
            src_eqv_here = 0;
5472
          else if (src_related && GET_CODE (src_related) == code
5473
                   && rtx_equal_p (src_related, p->exp))
5474
            src_related = 0;
5475
 
5476
          /* This is the same as the destination of the insns, we want
5477
             to prefer it.  Copy it to src_related.  The code below will
5478
             then give it a negative cost.  */
5479
          if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5480
            src_related = dest;
5481
        }
5482
 
5483
      /* Find the cheapest valid equivalent, trying all the available
5484
         possibilities.  Prefer items not in the hash table to ones
5485
         that are when they are equal cost.  Note that we can never
5486
         worsen an insn as the current contents will also succeed.
5487
         If we find an equivalent identical to the destination, use it as best,
5488
         since this insn will probably be eliminated in that case.  */
5489
      if (src)
5490
        {
5491
          if (rtx_equal_p (src, dest))
5492
            src_cost = src_regcost = -1;
5493
          else
5494
            {
5495
              src_cost = COST (src);
5496
              src_regcost = approx_reg_cost (src);
5497
            }
5498
        }
5499
 
5500
      if (src_eqv_here)
5501
        {
5502
          if (rtx_equal_p (src_eqv_here, dest))
5503
            src_eqv_cost = src_eqv_regcost = -1;
5504
          else
5505
            {
5506
              src_eqv_cost = COST (src_eqv_here);
5507
              src_eqv_regcost = approx_reg_cost (src_eqv_here);
5508
            }
5509
        }
5510
 
5511
      if (src_folded)
5512
        {
5513
          if (rtx_equal_p (src_folded, dest))
5514
            src_folded_cost = src_folded_regcost = -1;
5515
          else
5516
            {
5517
              src_folded_cost = COST (src_folded);
5518
              src_folded_regcost = approx_reg_cost (src_folded);
5519
            }
5520
        }
5521
 
5522
      if (src_related)
5523
        {
5524
          if (rtx_equal_p (src_related, dest))
5525
            src_related_cost = src_related_regcost = -1;
5526
          else
5527
            {
5528
              src_related_cost = COST (src_related);
5529
              src_related_regcost = approx_reg_cost (src_related);
5530
            }
5531
        }
5532
 
5533
      /* If this was an indirect jump insn, a known label will really be
5534
         cheaper even though it looks more expensive.  */
5535
      if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5536
        src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5537
 
5538
      /* Terminate loop when replacement made.  This must terminate since
5539
         the current contents will be tested and will always be valid.  */
5540
      while (1)
5541
        {
5542
          rtx trial;
5543
 
5544
          /* Skip invalid entries.  */
5545
          while (elt && !REG_P (elt->exp)
5546
                 && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
5547
            elt = elt->next_same_value;
5548
 
5549
          /* A paradoxical subreg would be bad here: it'll be the right
5550
             size, but later may be adjusted so that the upper bits aren't
5551
             what we want.  So reject it.  */
5552
          if (elt != 0
5553
              && GET_CODE (elt->exp) == SUBREG
5554
              && (GET_MODE_SIZE (GET_MODE (elt->exp))
5555
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5556
              /* It is okay, though, if the rtx we're trying to match
5557
                 will ignore any of the bits we can't predict.  */
5558
              && ! (src != 0
5559
                    && GET_CODE (src) == SUBREG
5560
                    && GET_MODE (src) == GET_MODE (elt->exp)
5561
                    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5562
                        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5563
            {
5564
              elt = elt->next_same_value;
5565
              continue;
5566
            }
5567
 
5568
          if (elt)
5569
            {
5570
              src_elt_cost = elt->cost;
5571
              src_elt_regcost = elt->regcost;
5572
            }
5573
 
5574
          /* Find cheapest and skip it for the next time.   For items
5575
             of equal cost, use this order:
5576
             src_folded, src, src_eqv, src_related and hash table entry.  */
5577
          if (src_folded
5578
              && preferable (src_folded_cost, src_folded_regcost,
5579
                             src_cost, src_regcost) <= 0
5580
              && preferable (src_folded_cost, src_folded_regcost,
5581
                             src_eqv_cost, src_eqv_regcost) <= 0
5582
              && preferable (src_folded_cost, src_folded_regcost,
5583
                             src_related_cost, src_related_regcost) <= 0
5584
              && preferable (src_folded_cost, src_folded_regcost,
5585
                             src_elt_cost, src_elt_regcost) <= 0)
5586
            {
5587
              trial = src_folded, src_folded_cost = MAX_COST;
5588
              if (src_folded_force_flag)
5589
                {
5590
                  rtx forced = force_const_mem (mode, trial);
5591
                  if (forced)
5592
                    trial = forced;
5593
                }
5594
            }
5595
          else if (src
5596
                   && preferable (src_cost, src_regcost,
5597
                                  src_eqv_cost, src_eqv_regcost) <= 0
5598
                   && preferable (src_cost, src_regcost,
5599
                                  src_related_cost, src_related_regcost) <= 0
5600
                   && preferable (src_cost, src_regcost,
5601
                                  src_elt_cost, src_elt_regcost) <= 0)
5602
            trial = src, src_cost = MAX_COST;
5603
          else if (src_eqv_here
5604
                   && preferable (src_eqv_cost, src_eqv_regcost,
5605
                                  src_related_cost, src_related_regcost) <= 0
5606
                   && preferable (src_eqv_cost, src_eqv_regcost,
5607
                                  src_elt_cost, src_elt_regcost) <= 0)
5608
            trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5609
          else if (src_related
5610
                   && preferable (src_related_cost, src_related_regcost,
5611
                                  src_elt_cost, src_elt_regcost) <= 0)
5612
            trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5613
          else
5614
            {
5615
              trial = copy_rtx (elt->exp);
5616
              elt = elt->next_same_value;
5617
              src_elt_cost = MAX_COST;
5618
            }
5619
 
5620
          /* We don't normally have an insn matching (set (pc) (pc)), so
5621
             check for this separately here.  We will delete such an
5622
             insn below.
5623
 
5624
             For other cases such as a table jump or conditional jump
5625
             where we know the ultimate target, go ahead and replace the
5626
             operand.  While that may not make a valid insn, we will
5627
             reemit the jump below (and also insert any necessary
5628
             barriers).  */
5629
          if (n_sets == 1 && dest == pc_rtx
5630
              && (trial == pc_rtx
5631
                  || (GET_CODE (trial) == LABEL_REF
5632
                      && ! condjump_p (insn))))
5633
            {
5634
              /* Don't substitute non-local labels, this confuses CFG.  */
5635
              if (GET_CODE (trial) == LABEL_REF
5636
                  && LABEL_REF_NONLOCAL_P (trial))
5637
                continue;
5638
 
5639
              SET_SRC (sets[i].rtl) = trial;
5640
              cse_jumps_altered = 1;
5641
              break;
5642
            }
5643
 
5644
          /* Reject certain invalid forms of CONST that we create.  */
5645
          else if (CONSTANT_P (trial)
5646
                   && GET_CODE (trial) == CONST
5647
                   /* Reject cases that will cause decode_rtx_const to
5648
                      die.  On the alpha when simplifying a switch, we
5649
                      get (const (truncate (minus (label_ref)
5650
                      (label_ref)))).  */
5651
                   && (GET_CODE (XEXP (trial, 0)) == TRUNCATE
5652
                       /* Likewise on IA-64, except without the
5653
                          truncate.  */
5654
                       || (GET_CODE (XEXP (trial, 0)) == MINUS
5655
                           && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5656
                           && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)))
5657
            /* Do nothing for this case.  */
5658
            ;
5659
 
5660
          /* Look for a substitution that makes a valid insn.  */
5661
          else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5662
            {
5663
              rtx new = canon_reg (SET_SRC (sets[i].rtl), insn);
5664
 
5665
              /* If we just made a substitution inside a libcall, then we
5666
                 need to make the same substitution in any notes attached
5667
                 to the RETVAL insn.  */
5668
              if (libcall_insn
5669
                  && (REG_P (sets[i].orig_src)
5670
                      || GET_CODE (sets[i].orig_src) == SUBREG
5671
                      || MEM_P (sets[i].orig_src)))
5672
                {
5673
                  rtx note = find_reg_equal_equiv_note (libcall_insn);
5674
                  if (note != 0)
5675
                    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0),
5676
                                                           sets[i].orig_src,
5677
                                                           copy_rtx (new));
5678
                }
5679
 
5680
              /* The result of apply_change_group can be ignored; see
5681
                 canon_reg.  */
5682
 
5683
              validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
5684
              apply_change_group ();
5685
              break;
5686
            }
5687
 
5688
          /* If we previously found constant pool entries for
5689
             constants and this is a constant, try making a
5690
             pool entry.  Put it in src_folded unless we already have done
5691
             this since that is where it likely came from.  */
5692
 
5693
          else if (constant_pool_entries_cost
5694
                   && CONSTANT_P (trial)
5695
                   && (src_folded == 0
5696
                       || (!MEM_P (src_folded)
5697
                           && ! src_folded_force_flag))
5698
                   && GET_MODE_CLASS (mode) != MODE_CC
5699
                   && mode != VOIDmode)
5700
            {
5701
              src_folded_force_flag = 1;
5702
              src_folded = trial;
5703
              src_folded_cost = constant_pool_entries_cost;
5704
              src_folded_regcost = constant_pool_entries_regcost;
5705
            }
5706
        }
5707
 
5708
      src = SET_SRC (sets[i].rtl);
5709
 
5710
      /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5711
         However, there is an important exception:  If both are registers
5712
         that are not the head of their equivalence class, replace SET_SRC
5713
         with the head of the class.  If we do not do this, we will have
5714
         both registers live over a portion of the basic block.  This way,
5715
         their lifetimes will likely abut instead of overlapping.  */
5716
      if (REG_P (dest)
5717
          && REGNO_QTY_VALID_P (REGNO (dest)))
5718
        {
5719
          int dest_q = REG_QTY (REGNO (dest));
5720
          struct qty_table_elem *dest_ent = &qty_table[dest_q];
5721
 
5722
          if (dest_ent->mode == GET_MODE (dest)
5723
              && dest_ent->first_reg != REGNO (dest)
5724
              && REG_P (src) && REGNO (src) == REGNO (dest)
5725
              /* Don't do this if the original insn had a hard reg as
5726
                 SET_SRC or SET_DEST.  */
5727
              && (!REG_P (sets[i].src)
5728
                  || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5729
              && (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5730
            /* We can't call canon_reg here because it won't do anything if
5731
               SRC is a hard register.  */
5732
            {
5733
              int src_q = REG_QTY (REGNO (src));
5734
              struct qty_table_elem *src_ent = &qty_table[src_q];
5735
              int first = src_ent->first_reg;
5736
              rtx new_src
5737
                = (first >= FIRST_PSEUDO_REGISTER
5738
                   ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5739
 
5740
              /* We must use validate-change even for this, because this
5741
                 might be a special no-op instruction, suitable only to
5742
                 tag notes onto.  */
5743
              if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5744
                {
5745
                  src = new_src;
5746
                  /* If we had a constant that is cheaper than what we are now
5747
                     setting SRC to, use that constant.  We ignored it when we
5748
                     thought we could make this into a no-op.  */
5749
                  if (src_const && COST (src_const) < COST (src)
5750
                      && validate_change (insn, &SET_SRC (sets[i].rtl),
5751
                                          src_const, 0))
5752
                    src = src_const;
5753
                }
5754
            }
5755
        }
5756
 
5757
      /* If we made a change, recompute SRC values.  */
5758
      if (src != sets[i].src)
5759
        {
5760
          cse_altered = 1;
5761
          do_not_record = 0;
5762
          hash_arg_in_memory = 0;
5763
          sets[i].src = src;
5764
          sets[i].src_hash = HASH (src, mode);
5765
          sets[i].src_volatile = do_not_record;
5766
          sets[i].src_in_memory = hash_arg_in_memory;
5767
          sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5768
        }
5769
 
5770
      /* If this is a single SET, we are setting a register, and we have an
5771
         equivalent constant, we want to add a REG_NOTE.   We don't want
5772
         to write a REG_EQUAL note for a constant pseudo since verifying that
5773
         that pseudo hasn't been eliminated is a pain.  Such a note also
5774
         won't help anything.
5775
 
5776
         Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5777
         which can be created for a reference to a compile time computable
5778
         entry in a jump table.  */
5779
 
5780
      if (n_sets == 1 && src_const && REG_P (dest)
5781
          && !REG_P (src_const)
5782
          && ! (GET_CODE (src_const) == CONST
5783
                && GET_CODE (XEXP (src_const, 0)) == MINUS
5784
                && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5785
                && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5786
        {
5787
          /* We only want a REG_EQUAL note if src_const != src.  */
5788
          if (! rtx_equal_p (src, src_const))
5789
            {
5790
              /* Make sure that the rtx is not shared.  */
5791
              src_const = copy_rtx (src_const);
5792
 
5793
              /* Record the actual constant value in a REG_EQUAL note,
5794
                 making a new one if one does not already exist.  */
5795
              set_unique_reg_note (insn, REG_EQUAL, src_const);
5796
            }
5797
        }
5798
 
5799
      /* Now deal with the destination.  */
5800
      do_not_record = 0;
5801
 
5802
      /* Look within any ZERO_EXTRACT to the MEM or REG within it.  */
5803
      while (GET_CODE (dest) == SUBREG
5804
             || GET_CODE (dest) == ZERO_EXTRACT
5805
             || GET_CODE (dest) == STRICT_LOW_PART)
5806
        dest = XEXP (dest, 0);
5807
 
5808
      sets[i].inner_dest = dest;
5809
 
5810
      if (MEM_P (dest))
5811
        {
5812
#ifdef PUSH_ROUNDING
5813
          /* Stack pushes invalidate the stack pointer.  */
5814
          rtx addr = XEXP (dest, 0);
5815
          if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
5816
              && XEXP (addr, 0) == stack_pointer_rtx)
5817
            invalidate (stack_pointer_rtx, VOIDmode);
5818
#endif
5819
          dest = fold_rtx (dest, insn);
5820
        }
5821
 
5822
      /* Compute the hash code of the destination now,
5823
         before the effects of this instruction are recorded,
5824
         since the register values used in the address computation
5825
         are those before this instruction.  */
5826
      sets[i].dest_hash = HASH (dest, mode);
5827
 
5828
      /* Don't enter a bit-field in the hash table
5829
         because the value in it after the store
5830
         may not equal what was stored, due to truncation.  */
5831
 
5832
      if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT)
5833
        {
5834
          rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5835
 
5836
          if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5837
              && GET_CODE (width) == CONST_INT
5838
              && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5839
              && ! (INTVAL (src_const)
5840
                    & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5841
            /* Exception: if the value is constant,
5842
               and it won't be truncated, record it.  */
5843
            ;
5844
          else
5845
            {
5846
              /* This is chosen so that the destination will be invalidated
5847
                 but no new value will be recorded.
5848
                 We must invalidate because sometimes constant
5849
                 values can be recorded for bitfields.  */
5850
              sets[i].src_elt = 0;
5851
              sets[i].src_volatile = 1;
5852
              src_eqv = 0;
5853
              src_eqv_elt = 0;
5854
            }
5855
        }
5856
 
5857
      /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5858
         the insn.  */
5859
      else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5860
        {
5861
          /* One less use of the label this insn used to jump to.  */
5862
          delete_insn (insn);
5863
          cse_jumps_altered = 1;
5864
          /* No more processing for this set.  */
5865
          sets[i].rtl = 0;
5866
        }
5867
 
5868
      /* If this SET is now setting PC to a label, we know it used to
5869
         be a conditional or computed branch.  */
5870
      else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF
5871
               && !LABEL_REF_NONLOCAL_P (src))
5872
        {
5873
          /* Now emit a BARRIER after the unconditional jump.  */
5874
          if (NEXT_INSN (insn) == 0
5875
              || !BARRIER_P (NEXT_INSN (insn)))
5876
            emit_barrier_after (insn);
5877
 
5878
          /* We reemit the jump in as many cases as possible just in
5879
             case the form of an unconditional jump is significantly
5880
             different than a computed jump or conditional jump.
5881
 
5882
             If this insn has multiple sets, then reemitting the
5883
             jump is nontrivial.  So instead we just force rerecognition
5884
             and hope for the best.  */
5885
          if (n_sets == 1)
5886
            {
5887
              rtx new, note;
5888
 
5889
              new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5890
              JUMP_LABEL (new) = XEXP (src, 0);
5891
              LABEL_NUSES (XEXP (src, 0))++;
5892
 
5893
              /* Make sure to copy over REG_NON_LOCAL_GOTO.  */
5894
              note = find_reg_note (insn, REG_NON_LOCAL_GOTO, 0);
5895
              if (note)
5896
                {
5897
                  XEXP (note, 1) = NULL_RTX;
5898
                  REG_NOTES (new) = note;
5899
                }
5900
 
5901
              delete_insn (insn);
5902
              insn = new;
5903
 
5904
              /* Now emit a BARRIER after the unconditional jump.  */
5905
              if (NEXT_INSN (insn) == 0
5906
                  || !BARRIER_P (NEXT_INSN (insn)))
5907
                emit_barrier_after (insn);
5908
            }
5909
          else
5910
            INSN_CODE (insn) = -1;
5911
 
5912
          /* Do not bother deleting any unreachable code,
5913
             let jump/flow do that.  */
5914
 
5915
          cse_jumps_altered = 1;
5916
          sets[i].rtl = 0;
5917
        }
5918
 
5919
      /* If destination is volatile, invalidate it and then do no further
5920
         processing for this assignment.  */
5921
 
5922
      else if (do_not_record)
5923
        {
5924
          if (REG_P (dest) || GET_CODE (dest) == SUBREG)
5925
            invalidate (dest, VOIDmode);
5926
          else if (MEM_P (dest))
5927
            invalidate (dest, VOIDmode);
5928
          else if (GET_CODE (dest) == STRICT_LOW_PART
5929
                   || GET_CODE (dest) == ZERO_EXTRACT)
5930
            invalidate (XEXP (dest, 0), GET_MODE (dest));
5931
          sets[i].rtl = 0;
5932
        }
5933
 
5934
      if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5935
        sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5936
 
5937
#ifdef HAVE_cc0
5938
      /* If setting CC0, record what it was set to, or a constant, if it
5939
         is equivalent to a constant.  If it is being set to a floating-point
5940
         value, make a COMPARE with the appropriate constant of 0.  If we
5941
         don't do this, later code can interpret this as a test against
5942
         const0_rtx, which can cause problems if we try to put it into an
5943
         insn as a floating-point operand.  */
5944
      if (dest == cc0_rtx)
5945
        {
5946
          this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5947
          this_insn_cc0_mode = mode;
5948
          if (FLOAT_MODE_P (mode))
5949
            this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5950
                                             CONST0_RTX (mode));
5951
        }
5952
#endif
5953
    }
5954
 
5955
  /* Now enter all non-volatile source expressions in the hash table
5956
     if they are not already present.
5957
     Record their equivalence classes in src_elt.
5958
     This way we can insert the corresponding destinations into
5959
     the same classes even if the actual sources are no longer in them
5960
     (having been invalidated).  */
5961
 
5962
  if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5963
      && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5964
    {
5965
      struct table_elt *elt;
5966
      struct table_elt *classp = sets[0].src_elt;
5967
      rtx dest = SET_DEST (sets[0].rtl);
5968
      enum machine_mode eqvmode = GET_MODE (dest);
5969
 
5970
      if (GET_CODE (dest) == STRICT_LOW_PART)
5971
        {
5972
          eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5973
          classp = 0;
5974
        }
5975
      if (insert_regs (src_eqv, classp, 0))
5976
        {
5977
          rehash_using_reg (src_eqv);
5978
          src_eqv_hash = HASH (src_eqv, eqvmode);
5979
        }
5980
      elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5981
      elt->in_memory = src_eqv_in_memory;
5982
      src_eqv_elt = elt;
5983
 
5984
      /* Check to see if src_eqv_elt is the same as a set source which
5985
         does not yet have an elt, and if so set the elt of the set source
5986
         to src_eqv_elt.  */
5987
      for (i = 0; i < n_sets; i++)
5988
        if (sets[i].rtl && sets[i].src_elt == 0
5989
            && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5990
          sets[i].src_elt = src_eqv_elt;
5991
    }
5992
 
5993
  for (i = 0; i < n_sets; i++)
5994
    if (sets[i].rtl && ! sets[i].src_volatile
5995
        && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5996
      {
5997
        if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5998
          {
5999
            /* REG_EQUAL in setting a STRICT_LOW_PART
6000
               gives an equivalent for the entire destination register,
6001
               not just for the subreg being stored in now.
6002
               This is a more interesting equivalence, so we arrange later
6003
               to treat the entire reg as the destination.  */
6004
            sets[i].src_elt = src_eqv_elt;
6005
            sets[i].src_hash = src_eqv_hash;
6006
          }
6007
        else
6008
          {
6009
            /* Insert source and constant equivalent into hash table, if not
6010
               already present.  */
6011
            struct table_elt *classp = src_eqv_elt;
6012
            rtx src = sets[i].src;
6013
            rtx dest = SET_DEST (sets[i].rtl);
6014
            enum machine_mode mode
6015
              = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
6016
 
6017
            /* It's possible that we have a source value known to be
6018
               constant but don't have a REG_EQUAL note on the insn.
6019
               Lack of a note will mean src_eqv_elt will be NULL.  This
6020
               can happen where we've generated a SUBREG to access a
6021
               CONST_INT that is already in a register in a wider mode.
6022
               Ensure that the source expression is put in the proper
6023
               constant class.  */
6024
            if (!classp)
6025
              classp = sets[i].src_const_elt;
6026
 
6027
            if (sets[i].src_elt == 0)
6028
              {
6029
                /* Don't put a hard register source into the table if this is
6030
                   the last insn of a libcall.  In this case, we only need
6031
                   to put src_eqv_elt in src_elt.  */
6032
                if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6033
                  {
6034
                    struct table_elt *elt;
6035
 
6036
                    /* Note that these insert_regs calls cannot remove
6037
                       any of the src_elt's, because they would have failed to
6038
                       match if not still valid.  */
6039
                    if (insert_regs (src, classp, 0))
6040
                      {
6041
                        rehash_using_reg (src);
6042
                        sets[i].src_hash = HASH (src, mode);
6043
                      }
6044
                    elt = insert (src, classp, sets[i].src_hash, mode);
6045
                    elt->in_memory = sets[i].src_in_memory;
6046
                    sets[i].src_elt = classp = elt;
6047
                  }
6048
                else
6049
                  sets[i].src_elt = classp;
6050
              }
6051
            if (sets[i].src_const && sets[i].src_const_elt == 0
6052
                && src != sets[i].src_const
6053
                && ! rtx_equal_p (sets[i].src_const, src))
6054
              sets[i].src_elt = insert (sets[i].src_const, classp,
6055
                                        sets[i].src_const_hash, mode);
6056
          }
6057
      }
6058
    else if (sets[i].src_elt == 0)
6059
      /* If we did not insert the source into the hash table (e.g., it was
6060
         volatile), note the equivalence class for the REG_EQUAL value, if any,
6061
         so that the destination goes into that class.  */
6062
      sets[i].src_elt = src_eqv_elt;
6063
 
6064
  /* Record destination addresses in the hash table.  This allows us to
6065
     check if they are invalidated by other sets.  */
6066
  for (i = 0; i < n_sets; i++)
6067
    {
6068
      if (sets[i].rtl)
6069
        {
6070
          rtx x = sets[i].inner_dest;
6071
          struct table_elt *elt;
6072
          enum machine_mode mode;
6073
          unsigned hash;
6074
 
6075
          if (MEM_P (x))
6076
            {
6077
              x = XEXP (x, 0);
6078
              mode = GET_MODE (x);
6079
              hash = HASH (x, mode);
6080
              elt = lookup (x, hash, mode);
6081
              if (!elt)
6082
                {
6083
                  if (insert_regs (x, NULL, 0))
6084
                    {
6085
                      rtx dest = SET_DEST (sets[i].rtl);
6086
 
6087
                      rehash_using_reg (x);
6088
                      hash = HASH (x, mode);
6089
                      sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6090
                    }
6091
                  elt = insert (x, NULL, hash, mode);
6092
                }
6093
 
6094
              sets[i].dest_addr_elt = elt;
6095
            }
6096
          else
6097
            sets[i].dest_addr_elt = NULL;
6098
        }
6099
    }
6100
 
6101
  invalidate_from_clobbers (x);
6102
 
6103
  /* Some registers are invalidated by subroutine calls.  Memory is
6104
     invalidated by non-constant calls.  */
6105
 
6106
  if (CALL_P (insn))
6107
    {
6108
      if (! CONST_OR_PURE_CALL_P (insn))
6109
        invalidate_memory ();
6110
      invalidate_for_call ();
6111
    }
6112
 
6113
  /* Now invalidate everything set by this instruction.
6114
     If a SUBREG or other funny destination is being set,
6115
     sets[i].rtl is still nonzero, so here we invalidate the reg
6116
     a part of which is being set.  */
6117
 
6118
  for (i = 0; i < n_sets; i++)
6119
    if (sets[i].rtl)
6120
      {
6121
        /* We can't use the inner dest, because the mode associated with
6122
           a ZERO_EXTRACT is significant.  */
6123
        rtx dest = SET_DEST (sets[i].rtl);
6124
 
6125
        /* Needed for registers to remove the register from its
6126
           previous quantity's chain.
6127
           Needed for memory if this is a nonvarying address, unless
6128
           we have just done an invalidate_memory that covers even those.  */
6129
        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6130
          invalidate (dest, VOIDmode);
6131
        else if (MEM_P (dest))
6132
          invalidate (dest, VOIDmode);
6133
        else if (GET_CODE (dest) == STRICT_LOW_PART
6134
                 || GET_CODE (dest) == ZERO_EXTRACT)
6135
          invalidate (XEXP (dest, 0), GET_MODE (dest));
6136
      }
6137
 
6138
  /* A volatile ASM invalidates everything.  */
6139
  if (NONJUMP_INSN_P (insn)
6140
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6141
      && MEM_VOLATILE_P (PATTERN (insn)))
6142
    flush_hash_table ();
6143
 
6144
  /* Make sure registers mentioned in destinations
6145
     are safe for use in an expression to be inserted.
6146
     This removes from the hash table
6147
     any invalid entry that refers to one of these registers.
6148
 
6149
     We don't care about the return value from mention_regs because
6150
     we are going to hash the SET_DEST values unconditionally.  */
6151
 
6152
  for (i = 0; i < n_sets; i++)
6153
    {
6154
      if (sets[i].rtl)
6155
        {
6156
          rtx x = SET_DEST (sets[i].rtl);
6157
 
6158
          if (!REG_P (x))
6159
            mention_regs (x);
6160
          else
6161
            {
6162
              /* We used to rely on all references to a register becoming
6163
                 inaccessible when a register changes to a new quantity,
6164
                 since that changes the hash code.  However, that is not
6165
                 safe, since after HASH_SIZE new quantities we get a
6166
                 hash 'collision' of a register with its own invalid
6167
                 entries.  And since SUBREGs have been changed not to
6168
                 change their hash code with the hash code of the register,
6169
                 it wouldn't work any longer at all.  So we have to check
6170
                 for any invalid references lying around now.
6171
                 This code is similar to the REG case in mention_regs,
6172
                 but it knows that reg_tick has been incremented, and
6173
                 it leaves reg_in_table as -1 .  */
6174
              unsigned int regno = REGNO (x);
6175
              unsigned int endregno
6176
                = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6177
                           : hard_regno_nregs[regno][GET_MODE (x)]);
6178
              unsigned int i;
6179
 
6180
              for (i = regno; i < endregno; i++)
6181
                {
6182
                  if (REG_IN_TABLE (i) >= 0)
6183
                    {
6184
                      remove_invalid_refs (i);
6185
                      REG_IN_TABLE (i) = -1;
6186
                    }
6187
                }
6188
            }
6189
        }
6190
    }
6191
 
6192
  /* We may have just removed some of the src_elt's from the hash table.
6193
     So replace each one with the current head of the same class.
6194
     Also check if destination addresses have been removed.  */
6195
 
6196
  for (i = 0; i < n_sets; i++)
6197
    if (sets[i].rtl)
6198
      {
6199
        if (sets[i].dest_addr_elt
6200
            && sets[i].dest_addr_elt->first_same_value == 0)
6201
          {
6202
            /* The elt was removed, which means this destination is not
6203
               valid after this instruction.  */
6204
            sets[i].rtl = NULL_RTX;
6205
          }
6206
        else if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6207
          /* If elt was removed, find current head of same class,
6208
             or 0 if nothing remains of that class.  */
6209
          {
6210
            struct table_elt *elt = sets[i].src_elt;
6211
 
6212
            while (elt && elt->prev_same_value)
6213
              elt = elt->prev_same_value;
6214
 
6215
            while (elt && elt->first_same_value == 0)
6216
              elt = elt->next_same_value;
6217
            sets[i].src_elt = elt ? elt->first_same_value : 0;
6218
          }
6219
      }
6220
 
6221
  /* Now insert the destinations into their equivalence classes.  */
6222
 
6223
  for (i = 0; i < n_sets; i++)
6224
    if (sets[i].rtl)
6225
      {
6226
        rtx dest = SET_DEST (sets[i].rtl);
6227
        struct table_elt *elt;
6228
 
6229
        /* Don't record value if we are not supposed to risk allocating
6230
           floating-point values in registers that might be wider than
6231
           memory.  */
6232
        if ((flag_float_store
6233
             && MEM_P (dest)
6234
             && FLOAT_MODE_P (GET_MODE (dest)))
6235
            /* Don't record BLKmode values, because we don't know the
6236
               size of it, and can't be sure that other BLKmode values
6237
               have the same or smaller size.  */
6238
            || GET_MODE (dest) == BLKmode
6239
            /* Don't record values of destinations set inside a libcall block
6240
               since we might delete the libcall.  Things should have been set
6241
               up so we won't want to reuse such a value, but we play it safe
6242
               here.  */
6243
            || libcall_insn
6244
            /* If we didn't put a REG_EQUAL value or a source into the hash
6245
               table, there is no point is recording DEST.  */
6246
            || sets[i].src_elt == 0
6247
            /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6248
               or SIGN_EXTEND, don't record DEST since it can cause
6249
               some tracking to be wrong.
6250
 
6251
               ??? Think about this more later.  */
6252
            || (GET_CODE (dest) == SUBREG
6253
                && (GET_MODE_SIZE (GET_MODE (dest))
6254
                    > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6255
                && (GET_CODE (sets[i].src) == SIGN_EXTEND
6256
                    || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6257
          continue;
6258
 
6259
        /* STRICT_LOW_PART isn't part of the value BEING set,
6260
           and neither is the SUBREG inside it.
6261
           Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT.  */
6262
        if (GET_CODE (dest) == STRICT_LOW_PART)
6263
          dest = SUBREG_REG (XEXP (dest, 0));
6264
 
6265
        if (REG_P (dest) || GET_CODE (dest) == SUBREG)
6266
          /* Registers must also be inserted into chains for quantities.  */
6267
          if (insert_regs (dest, sets[i].src_elt, 1))
6268
            {
6269
              /* If `insert_regs' changes something, the hash code must be
6270
                 recalculated.  */
6271
              rehash_using_reg (dest);
6272
              sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6273
            }
6274
 
6275
        elt = insert (dest, sets[i].src_elt,
6276
                      sets[i].dest_hash, GET_MODE (dest));
6277
 
6278
        elt->in_memory = (MEM_P (sets[i].inner_dest)
6279
                          && !MEM_READONLY_P (sets[i].inner_dest));
6280
 
6281
        /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6282
           narrower than M2, and both M1 and M2 are the same number of words,
6283
           we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6284
           make that equivalence as well.
6285
 
6286
           However, BAR may have equivalences for which gen_lowpart
6287
           will produce a simpler value than gen_lowpart applied to
6288
           BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6289
           BAR's equivalences.  If we don't get a simplified form, make
6290
           the SUBREG.  It will not be used in an equivalence, but will
6291
           cause two similar assignments to be detected.
6292
 
6293
           Note the loop below will find SUBREG_REG (DEST) since we have
6294
           already entered SRC and DEST of the SET in the table.  */
6295
 
6296
        if (GET_CODE (dest) == SUBREG
6297
            && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6298
                 / UNITS_PER_WORD)
6299
                == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6300
            && (GET_MODE_SIZE (GET_MODE (dest))
6301
                >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6302
            && sets[i].src_elt != 0)
6303
          {
6304
            enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6305
            struct table_elt *elt, *classp = 0;
6306
 
6307
            for (elt = sets[i].src_elt->first_same_value; elt;
6308
                 elt = elt->next_same_value)
6309
              {
6310
                rtx new_src = 0;
6311
                unsigned src_hash;
6312
                struct table_elt *src_elt;
6313
                int byte = 0;
6314
 
6315
                /* Ignore invalid entries.  */
6316
                if (!REG_P (elt->exp)
6317
                    && ! exp_equiv_p (elt->exp, elt->exp, 1, false))
6318
                  continue;
6319
 
6320
                /* We may have already been playing subreg games.  If the
6321
                   mode is already correct for the destination, use it.  */
6322
                if (GET_MODE (elt->exp) == new_mode)
6323
                  new_src = elt->exp;
6324
                else
6325
                  {
6326
                    /* Calculate big endian correction for the SUBREG_BYTE.
6327
                       We have already checked that M1 (GET_MODE (dest))
6328
                       is not narrower than M2 (new_mode).  */
6329
                    if (BYTES_BIG_ENDIAN)
6330
                      byte = (GET_MODE_SIZE (GET_MODE (dest))
6331
                              - GET_MODE_SIZE (new_mode));
6332
 
6333
                    new_src = simplify_gen_subreg (new_mode, elt->exp,
6334
                                                   GET_MODE (dest), byte);
6335
                  }
6336
 
6337
                /* The call to simplify_gen_subreg fails if the value
6338
                   is VOIDmode, yet we can't do any simplification, e.g.
6339
                   for EXPR_LISTs denoting function call results.
6340
                   It is invalid to construct a SUBREG with a VOIDmode
6341
                   SUBREG_REG, hence a zero new_src means we can't do
6342
                   this substitution.  */
6343
                if (! new_src)
6344
                  continue;
6345
 
6346
                src_hash = HASH (new_src, new_mode);
6347
                src_elt = lookup (new_src, src_hash, new_mode);
6348
 
6349
                /* Put the new source in the hash table is if isn't
6350
                   already.  */
6351
                if (src_elt == 0)
6352
                  {
6353
                    if (insert_regs (new_src, classp, 0))
6354
                      {
6355
                        rehash_using_reg (new_src);
6356
                        src_hash = HASH (new_src, new_mode);
6357
                      }
6358
                    src_elt = insert (new_src, classp, src_hash, new_mode);
6359
                    src_elt->in_memory = elt->in_memory;
6360
                  }
6361
                else if (classp && classp != src_elt->first_same_value)
6362
                  /* Show that two things that we've seen before are
6363
                     actually the same.  */
6364
                  merge_equiv_classes (src_elt, classp);
6365
 
6366
                classp = src_elt->first_same_value;
6367
                /* Ignore invalid entries.  */
6368
                while (classp
6369
                       && !REG_P (classp->exp)
6370
                       && ! exp_equiv_p (classp->exp, classp->exp, 1, false))
6371
                  classp = classp->next_same_value;
6372
              }
6373
          }
6374
      }
6375
 
6376
  /* Special handling for (set REG0 REG1) where REG0 is the
6377
     "cheapest", cheaper than REG1.  After cse, REG1 will probably not
6378
     be used in the sequel, so (if easily done) change this insn to
6379
     (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6380
     that computed their value.  Then REG1 will become a dead store
6381
     and won't cloud the situation for later optimizations.
6382
 
6383
     Do not make this change if REG1 is a hard register, because it will
6384
     then be used in the sequel and we may be changing a two-operand insn
6385
     into a three-operand insn.
6386
 
6387
     Also do not do this if we are operating on a copy of INSN.
6388
 
6389
     Also don't do this if INSN ends a libcall; this would cause an unrelated
6390
     register to be set in the middle of a libcall, and we then get bad code
6391
     if the libcall is deleted.  */
6392
 
6393
  if (n_sets == 1 && sets[0].rtl && REG_P (SET_DEST (sets[0].rtl))
6394
      && NEXT_INSN (PREV_INSN (insn)) == insn
6395
      && REG_P (SET_SRC (sets[0].rtl))
6396
      && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6397
      && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6398
    {
6399
      int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6400
      struct qty_table_elem *src_ent = &qty_table[src_q];
6401
 
6402
      if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6403
          && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6404
        {
6405
          rtx prev = insn;
6406
          /* Scan for the previous nonnote insn, but stop at a basic
6407
             block boundary.  */
6408
          do
6409
            {
6410
              prev = PREV_INSN (prev);
6411
            }
6412
          while (prev && NOTE_P (prev)
6413
                 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6414
 
6415
          /* Do not swap the registers around if the previous instruction
6416
             attaches a REG_EQUIV note to REG1.
6417
 
6418
             ??? It's not entirely clear whether we can transfer a REG_EQUIV
6419
             from the pseudo that originally shadowed an incoming argument
6420
             to another register.  Some uses of REG_EQUIV might rely on it
6421
             being attached to REG1 rather than REG2.
6422
 
6423
             This section previously turned the REG_EQUIV into a REG_EQUAL
6424
             note.  We cannot do that because REG_EQUIV may provide an
6425
             uninitialized stack slot when REG_PARM_STACK_SPACE is used.  */
6426
 
6427
          if (prev != 0 && NONJUMP_INSN_P (prev)
6428
              && GET_CODE (PATTERN (prev)) == SET
6429
              && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6430
              && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6431
            {
6432
              rtx dest = SET_DEST (sets[0].rtl);
6433
              rtx src = SET_SRC (sets[0].rtl);
6434
              rtx note;
6435
 
6436
              validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6437
              validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6438
              validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6439
              apply_change_group ();
6440
 
6441
              /* If INSN has a REG_EQUAL note, and this note mentions
6442
                 REG0, then we must delete it, because the value in
6443
                 REG0 has changed.  If the note's value is REG1, we must
6444
                 also delete it because that is now this insn's dest.  */
6445
              note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6446
              if (note != 0
6447
                  && (reg_mentioned_p (dest, XEXP (note, 0))
6448
                      || rtx_equal_p (src, XEXP (note, 0))))
6449
                remove_note (insn, note);
6450
            }
6451
        }
6452
    }
6453
 
6454
  /* If this is a conditional jump insn, record any known equivalences due to
6455
     the condition being tested.  */
6456
 
6457
  if (JUMP_P (insn)
6458
      && n_sets == 1 && GET_CODE (x) == SET
6459
      && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6460
    record_jump_equiv (insn, 0);
6461
 
6462
#ifdef HAVE_cc0
6463
  /* If the previous insn set CC0 and this insn no longer references CC0,
6464
     delete the previous insn.  Here we use the fact that nothing expects CC0
6465
     to be valid over an insn, which is true until the final pass.  */
6466
  if (prev_insn && NONJUMP_INSN_P (prev_insn)
6467
      && (tem = single_set (prev_insn)) != 0
6468
      && SET_DEST (tem) == cc0_rtx
6469
      && ! reg_mentioned_p (cc0_rtx, x))
6470
    delete_insn (prev_insn);
6471
 
6472
  prev_insn_cc0 = this_insn_cc0;
6473
  prev_insn_cc0_mode = this_insn_cc0_mode;
6474
  prev_insn = insn;
6475
#endif
6476
}
6477
 
6478
/* Remove from the hash table all expressions that reference memory.  */
6479
 
6480
static void
6481
invalidate_memory (void)
6482
{
6483
  int i;
6484
  struct table_elt *p, *next;
6485
 
6486
  for (i = 0; i < HASH_SIZE; i++)
6487
    for (p = table[i]; p; p = next)
6488
      {
6489
        next = p->next_same_hash;
6490
        if (p->in_memory)
6491
          remove_from_table (p, i);
6492
      }
6493
}
6494
 
6495
/* If ADDR is an address that implicitly affects the stack pointer, return
6496
   1 and update the register tables to show the effect.  Else, return 0.  */
6497
 
6498
static int
6499
addr_affects_sp_p (rtx addr)
6500
{
6501
  if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC
6502
      && REG_P (XEXP (addr, 0))
6503
      && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6504
    {
6505
      if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6506
        {
6507
          REG_TICK (STACK_POINTER_REGNUM)++;
6508
          /* Is it possible to use a subreg of SP?  */
6509
          SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6510
        }
6511
 
6512
      /* This should be *very* rare.  */
6513
      if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6514
        invalidate (stack_pointer_rtx, VOIDmode);
6515
 
6516
      return 1;
6517
    }
6518
 
6519
  return 0;
6520
}
6521
 
6522
/* Perform invalidation on the basis of everything about an insn
6523
   except for invalidating the actual places that are SET in it.
6524
   This includes the places CLOBBERed, and anything that might
6525
   alias with something that is SET or CLOBBERed.
6526
 
6527
   X is the pattern of the insn.  */
6528
 
6529
static void
6530
invalidate_from_clobbers (rtx x)
6531
{
6532
  if (GET_CODE (x) == CLOBBER)
6533
    {
6534
      rtx ref = XEXP (x, 0);
6535
      if (ref)
6536
        {
6537
          if (REG_P (ref) || GET_CODE (ref) == SUBREG
6538
              || MEM_P (ref))
6539
            invalidate (ref, VOIDmode);
6540
          else if (GET_CODE (ref) == STRICT_LOW_PART
6541
                   || GET_CODE (ref) == ZERO_EXTRACT)
6542
            invalidate (XEXP (ref, 0), GET_MODE (ref));
6543
        }
6544
    }
6545
  else if (GET_CODE (x) == PARALLEL)
6546
    {
6547
      int i;
6548
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6549
        {
6550
          rtx y = XVECEXP (x, 0, i);
6551
          if (GET_CODE (y) == CLOBBER)
6552
            {
6553
              rtx ref = XEXP (y, 0);
6554
              if (REG_P (ref) || GET_CODE (ref) == SUBREG
6555
                  || MEM_P (ref))
6556
                invalidate (ref, VOIDmode);
6557
              else if (GET_CODE (ref) == STRICT_LOW_PART
6558
                       || GET_CODE (ref) == ZERO_EXTRACT)
6559
                invalidate (XEXP (ref, 0), GET_MODE (ref));
6560
            }
6561
        }
6562
    }
6563
}
6564
 
6565
/* Process X, part of the REG_NOTES of an insn.  Look at any REG_EQUAL notes
6566
   and replace any registers in them with either an equivalent constant
6567
   or the canonical form of the register.  If we are inside an address,
6568
   only do this if the address remains valid.
6569
 
6570
   OBJECT is 0 except when within a MEM in which case it is the MEM.
6571
 
6572
   Return the replacement for X.  */
6573
 
6574
static rtx
6575
cse_process_notes (rtx x, rtx object)
6576
{
6577
  enum rtx_code code = GET_CODE (x);
6578
  const char *fmt = GET_RTX_FORMAT (code);
6579
  int i;
6580
 
6581
  switch (code)
6582
    {
6583
    case CONST_INT:
6584
    case CONST:
6585
    case SYMBOL_REF:
6586
    case LABEL_REF:
6587
    case CONST_DOUBLE:
6588
    case CONST_VECTOR:
6589
    case PC:
6590
    case CC0:
6591
    case LO_SUM:
6592
      return x;
6593
 
6594
    case MEM:
6595
      validate_change (x, &XEXP (x, 0),
6596
                       cse_process_notes (XEXP (x, 0), x), 0);
6597
      return x;
6598
 
6599
    case EXPR_LIST:
6600
    case INSN_LIST:
6601
      if (REG_NOTE_KIND (x) == REG_EQUAL)
6602
        XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6603
      if (XEXP (x, 1))
6604
        XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6605
      return x;
6606
 
6607
    case SIGN_EXTEND:
6608
    case ZERO_EXTEND:
6609
    case SUBREG:
6610
      {
6611
        rtx new = cse_process_notes (XEXP (x, 0), object);
6612
        /* We don't substitute VOIDmode constants into these rtx,
6613
           since they would impede folding.  */
6614
        if (GET_MODE (new) != VOIDmode)
6615
          validate_change (object, &XEXP (x, 0), new, 0);
6616
        return x;
6617
      }
6618
 
6619
    case REG:
6620
      i = REG_QTY (REGNO (x));
6621
 
6622
      /* Return a constant or a constant register.  */
6623
      if (REGNO_QTY_VALID_P (REGNO (x)))
6624
        {
6625
          struct qty_table_elem *ent = &qty_table[i];
6626
 
6627
          if (ent->const_rtx != NULL_RTX
6628
              && (CONSTANT_P (ent->const_rtx)
6629
                  || REG_P (ent->const_rtx)))
6630
            {
6631
              rtx new = gen_lowpart (GET_MODE (x), ent->const_rtx);
6632
              if (new)
6633
                return new;
6634
            }
6635
        }
6636
 
6637
      /* Otherwise, canonicalize this register.  */
6638
      return canon_reg (x, NULL_RTX);
6639
 
6640
    default:
6641
      break;
6642
    }
6643
 
6644
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
6645
    if (fmt[i] == 'e')
6646
      validate_change (object, &XEXP (x, i),
6647
                       cse_process_notes (XEXP (x, i), object), 0);
6648
 
6649
  return x;
6650
}
6651
 
6652
/* Process one SET of an insn that was skipped.  We ignore CLOBBERs
6653
   since they are done elsewhere.  This function is called via note_stores.  */
6654
 
6655
static void
6656
invalidate_skipped_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
6657
{
6658
  enum rtx_code code = GET_CODE (dest);
6659
 
6660
  if (code == MEM
6661
      && ! addr_affects_sp_p (dest)     /* If this is not a stack push ...  */
6662
      /* There are times when an address can appear varying and be a PLUS
6663
         during this scan when it would be a fixed address were we to know
6664
         the proper equivalences.  So invalidate all memory if there is
6665
         a BLKmode or nonscalar memory reference or a reference to a
6666
         variable address.  */
6667
      && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6668
          || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6669
    {
6670
      invalidate_memory ();
6671
      return;
6672
    }
6673
 
6674
  if (GET_CODE (set) == CLOBBER
6675
      || CC0_P (dest)
6676
      || dest == pc_rtx)
6677
    return;
6678
 
6679
  if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6680
    invalidate (XEXP (dest, 0), GET_MODE (dest));
6681
  else if (code == REG || code == SUBREG || code == MEM)
6682
    invalidate (dest, VOIDmode);
6683
}
6684
 
6685
/* Invalidate all insns from START up to the end of the function or the
6686
   next label.  This called when we wish to CSE around a block that is
6687
   conditionally executed.  */
6688
 
6689
static void
6690
invalidate_skipped_block (rtx start)
6691
{
6692
  rtx insn;
6693
 
6694
  for (insn = start; insn && !LABEL_P (insn);
6695
       insn = NEXT_INSN (insn))
6696
    {
6697
      if (! INSN_P (insn))
6698
        continue;
6699
 
6700
      if (CALL_P (insn))
6701
        {
6702
          if (! CONST_OR_PURE_CALL_P (insn))
6703
            invalidate_memory ();
6704
          invalidate_for_call ();
6705
        }
6706
 
6707
      invalidate_from_clobbers (PATTERN (insn));
6708
      note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6709
    }
6710
}
6711
 
6712
/* Find the end of INSN's basic block and return its range,
6713
   the total number of SETs in all the insns of the block, the last insn of the
6714
   block, and the branch path.
6715
 
6716
   The branch path indicates which branches should be followed.  If a nonzero
6717
   path size is specified, the block should be rescanned and a different set
6718
   of branches will be taken.  The branch path is only used if
6719
   FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6720
 
6721
   DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6722
   used to describe the block.  It is filled in with the information about
6723
   the current block.  The incoming structure's branch path, if any, is used
6724
   to construct the output branch path.  */
6725
 
6726
static void
6727
cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
6728
                        int follow_jumps, int skip_blocks)
6729
{
6730
  rtx p = insn, q;
6731
  int nsets = 0;
6732
  int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6733
  rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6734
  int path_size = data->path_size;
6735
  int path_entry = 0;
6736
  int i;
6737
 
6738
  /* Update the previous branch path, if any.  If the last branch was
6739
     previously PATH_TAKEN, mark it PATH_NOT_TAKEN.
6740
     If it was previously PATH_NOT_TAKEN,
6741
     shorten the path by one and look at the previous branch.  We know that
6742
     at least one branch must have been taken if PATH_SIZE is nonzero.  */
6743
  while (path_size > 0)
6744
    {
6745
      if (data->path[path_size - 1].status != PATH_NOT_TAKEN)
6746
        {
6747
          data->path[path_size - 1].status = PATH_NOT_TAKEN;
6748
          break;
6749
        }
6750
      else
6751
        path_size--;
6752
    }
6753
 
6754
  /* If the first instruction is marked with QImode, that means we've
6755
     already processed this block.  Our caller will look at DATA->LAST
6756
     to figure out where to go next.  We want to return the next block
6757
     in the instruction stream, not some branched-to block somewhere
6758
     else.  We accomplish this by pretending our called forbid us to
6759
     follow jumps, or skip blocks.  */
6760
  if (GET_MODE (insn) == QImode)
6761
    follow_jumps = skip_blocks = 0;
6762
 
6763
  /* Scan to end of this basic block.  */
6764
  while (p && !LABEL_P (p))
6765
    {
6766
      /* Don't cse over a call to setjmp; on some machines (eg VAX)
6767
         the regs restored by the longjmp come from
6768
         a later time than the setjmp.  */
6769
      if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
6770
          && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6771
        break;
6772
 
6773
      /* A PARALLEL can have lots of SETs in it,
6774
         especially if it is really an ASM_OPERANDS.  */
6775
      if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6776
        nsets += XVECLEN (PATTERN (p), 0);
6777
      else if (!NOTE_P (p))
6778
        nsets += 1;
6779
 
6780
      /* Ignore insns made by CSE; they cannot affect the boundaries of
6781
         the basic block.  */
6782
 
6783
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6784
        high_cuid = INSN_CUID (p);
6785
      if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6786
        low_cuid = INSN_CUID (p);
6787
 
6788
      /* See if this insn is in our branch path.  If it is and we are to
6789
         take it, do so.  */
6790
      if (path_entry < path_size && data->path[path_entry].branch == p)
6791
        {
6792
          if (data->path[path_entry].status != PATH_NOT_TAKEN)
6793
            p = JUMP_LABEL (p);
6794
 
6795
          /* Point to next entry in path, if any.  */
6796
          path_entry++;
6797
        }
6798
 
6799
      /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6800
         was specified, we haven't reached our maximum path length, there are
6801
         insns following the target of the jump, this is the only use of the
6802
         jump label, and the target label is preceded by a BARRIER.
6803
 
6804
         Alternatively, we can follow the jump if it branches around a
6805
         block of code and there are no other branches into the block.
6806
         In this case invalidate_skipped_block will be called to invalidate any
6807
         registers set in the block when following the jump.  */
6808
 
6809
      else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
6810
               && JUMP_P (p)
6811
               && GET_CODE (PATTERN (p)) == SET
6812
               && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6813
               && JUMP_LABEL (p) != 0
6814
               && LABEL_NUSES (JUMP_LABEL (p)) == 1
6815
               && NEXT_INSN (JUMP_LABEL (p)) != 0)
6816
        {
6817
          for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6818
            if ((!NOTE_P (q)
6819
                 || (PREV_INSN (q) && CALL_P (PREV_INSN (q))
6820
                     && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6821
                && (!LABEL_P (q) || LABEL_NUSES (q) != 0))
6822
              break;
6823
 
6824
          /* If we ran into a BARRIER, this code is an extension of the
6825
             basic block when the branch is taken.  */
6826
          if (follow_jumps && q != 0 && BARRIER_P (q))
6827
            {
6828
              /* Don't allow ourself to keep walking around an
6829
                 always-executed loop.  */
6830
              if (next_real_insn (q) == next)
6831
                {
6832
                  p = NEXT_INSN (p);
6833
                  continue;
6834
                }
6835
 
6836
              /* Similarly, don't put a branch in our path more than once.  */
6837
              for (i = 0; i < path_entry; i++)
6838
                if (data->path[i].branch == p)
6839
                  break;
6840
 
6841
              if (i != path_entry)
6842
                break;
6843
 
6844
              data->path[path_entry].branch = p;
6845
              data->path[path_entry++].status = PATH_TAKEN;
6846
 
6847
              /* This branch now ends our path.  It was possible that we
6848
                 didn't see this branch the last time around (when the
6849
                 insn in front of the target was a JUMP_INSN that was
6850
                 turned into a no-op).  */
6851
              path_size = path_entry;
6852
 
6853
              p = JUMP_LABEL (p);
6854
              /* Mark block so we won't scan it again later.  */
6855
              PUT_MODE (NEXT_INSN (p), QImode);
6856
            }
6857
          /* Detect a branch around a block of code.  */
6858
          else if (skip_blocks && q != 0 && !LABEL_P (q))
6859
            {
6860
              rtx tmp;
6861
 
6862
              if (next_real_insn (q) == next)
6863
                {
6864
                  p = NEXT_INSN (p);
6865
                  continue;
6866
                }
6867
 
6868
              for (i = 0; i < path_entry; i++)
6869
                if (data->path[i].branch == p)
6870
                  break;
6871
 
6872
              if (i != path_entry)
6873
                break;
6874
 
6875
              /* This is no_labels_between_p (p, q) with an added check for
6876
                 reaching the end of a function (in case Q precedes P).  */
6877
              for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
6878
                if (LABEL_P (tmp))
6879
                  break;
6880
 
6881
              if (tmp == q)
6882
                {
6883
                  data->path[path_entry].branch = p;
6884
                  data->path[path_entry++].status = PATH_AROUND;
6885
 
6886
                  path_size = path_entry;
6887
 
6888
                  p = JUMP_LABEL (p);
6889
                  /* Mark block so we won't scan it again later.  */
6890
                  PUT_MODE (NEXT_INSN (p), QImode);
6891
                }
6892
            }
6893
        }
6894
      p = NEXT_INSN (p);
6895
    }
6896
 
6897
  data->low_cuid = low_cuid;
6898
  data->high_cuid = high_cuid;
6899
  data->nsets = nsets;
6900
  data->last = p;
6901
 
6902
  /* If all jumps in the path are not taken, set our path length to zero
6903
     so a rescan won't be done.  */
6904
  for (i = path_size - 1; i >= 0; i--)
6905
    if (data->path[i].status != PATH_NOT_TAKEN)
6906
      break;
6907
 
6908
  if (i == -1)
6909
    data->path_size = 0;
6910
  else
6911
    data->path_size = path_size;
6912
 
6913
  /* End the current branch path.  */
6914
  data->path[path_size].branch = 0;
6915
}
6916
 
6917
/* Perform cse on the instructions of a function.
6918
   F is the first instruction.
6919
   NREGS is one plus the highest pseudo-reg number used in the instruction.
6920
 
6921
   Returns 1 if jump_optimize should be redone due to simplifications
6922
   in conditional jump instructions.  */
6923
 
6924
int
6925
cse_main (rtx f, int nregs)
6926
{
6927
  struct cse_basic_block_data val;
6928
  rtx insn = f;
6929
  int i;
6930
 
6931
  init_cse_reg_info (nregs);
6932
 
6933
  val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
6934
 
6935
  cse_jumps_altered = 0;
6936
  recorded_label_ref = 0;
6937
  constant_pool_entries_cost = 0;
6938
  constant_pool_entries_regcost = 0;
6939
  val.path_size = 0;
6940
  rtl_hooks = cse_rtl_hooks;
6941
 
6942
  init_recog ();
6943
  init_alias_analysis ();
6944
 
6945
  reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
6946
 
6947
  /* Find the largest uid.  */
6948
 
6949
  max_uid = get_max_uid ();
6950
  uid_cuid = XCNEWVEC (int, max_uid + 1);
6951
 
6952
  /* Compute the mapping from uids to cuids.
6953
     CUIDs are numbers assigned to insns, like uids,
6954
     except that cuids increase monotonically through the code.
6955
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
6956
     between two insns is not affected by -g.  */
6957
 
6958
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
6959
    {
6960
      if (!NOTE_P (insn)
6961
          || NOTE_LINE_NUMBER (insn) < 0)
6962
        INSN_CUID (insn) = ++i;
6963
      else
6964
        /* Give a line number note the same cuid as preceding insn.  */
6965
        INSN_CUID (insn) = i;
6966
    }
6967
 
6968
  /* Loop over basic blocks.
6969
     Compute the maximum number of qty's needed for each basic block
6970
     (which is 2 for each SET).  */
6971
  insn = f;
6972
  while (insn)
6973
    {
6974
      cse_altered = 0;
6975
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps,
6976
                              flag_cse_skip_blocks);
6977
 
6978
      /* If this basic block was already processed or has no sets, skip it.  */
6979
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
6980
        {
6981
          PUT_MODE (insn, VOIDmode);
6982
          insn = (val.last ? NEXT_INSN (val.last) : 0);
6983
          val.path_size = 0;
6984
          continue;
6985
        }
6986
 
6987
      cse_basic_block_start = val.low_cuid;
6988
      cse_basic_block_end = val.high_cuid;
6989
      max_qty = val.nsets * 2;
6990
 
6991
      if (dump_file)
6992
        fprintf (dump_file, ";; Processing block from %d to %d, %d sets.\n",
6993
                 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
6994
                 val.nsets);
6995
 
6996
      /* Make MAX_QTY bigger to give us room to optimize
6997
         past the end of this basic block, if that should prove useful.  */
6998
      if (max_qty < 500)
6999
        max_qty = 500;
7000
 
7001
      /* If this basic block is being extended by following certain jumps,
7002
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
7003
         Otherwise, we start after this basic block.  */
7004
      if (val.path_size > 0)
7005
        cse_basic_block (insn, val.last, val.path);
7006
      else
7007
        {
7008
          int old_cse_jumps_altered = cse_jumps_altered;
7009
          rtx temp;
7010
 
7011
          /* When cse changes a conditional jump to an unconditional
7012
             jump, we want to reprocess the block, since it will give
7013
             us a new branch path to investigate.  */
7014
          cse_jumps_altered = 0;
7015
          temp = cse_basic_block (insn, val.last, val.path);
7016
          if (cse_jumps_altered == 0
7017
              || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7018
            insn = temp;
7019
 
7020
          cse_jumps_altered |= old_cse_jumps_altered;
7021
        }
7022
 
7023
      if (cse_altered)
7024
        ggc_collect ();
7025
 
7026
#ifdef USE_C_ALLOCA
7027
      alloca (0);
7028
#endif
7029
    }
7030
 
7031
  /* Clean up.  */
7032
  end_alias_analysis ();
7033
  free (uid_cuid);
7034
  free (reg_eqv_table);
7035
  free (val.path);
7036
  rtl_hooks = general_rtl_hooks;
7037
 
7038
  return cse_jumps_altered || recorded_label_ref;
7039
}
7040
 
7041
/* Process a single basic block.  FROM and TO and the limits of the basic
7042
   block.  NEXT_BRANCH points to the branch path when following jumps or
7043
   a null path when not following jumps.  */
7044
 
7045
static rtx
7046
cse_basic_block (rtx from, rtx to, struct branch_path *next_branch)
7047
{
7048
  rtx insn;
7049
  int to_usage = 0;
7050
  rtx libcall_insn = NULL_RTX;
7051
  int num_insns = 0;
7052
  int no_conflict = 0;
7053
 
7054
  /* Allocate the space needed by qty_table.  */
7055
  qty_table = XNEWVEC (struct qty_table_elem, max_qty);
7056
 
7057
  new_basic_block ();
7058
 
7059
  /* TO might be a label.  If so, protect it from being deleted.  */
7060
  if (to != 0 && LABEL_P (to))
7061
    ++LABEL_NUSES (to);
7062
 
7063
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
7064
    {
7065
      enum rtx_code code = GET_CODE (insn);
7066
 
7067
      /* If we have processed 1,000 insns, flush the hash table to
7068
         avoid extreme quadratic behavior.  We must not include NOTEs
7069
         in the count since there may be more of them when generating
7070
         debugging information.  If we clear the table at different
7071
         times, code generated with -g -O might be different than code
7072
         generated with -O but not -g.
7073
 
7074
         ??? This is a real kludge and needs to be done some other way.
7075
         Perhaps for 2.9.  */
7076
      if (code != NOTE && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS))
7077
        {
7078
          flush_hash_table ();
7079
          num_insns = 0;
7080
        }
7081
 
7082
      /* See if this is a branch that is part of the path.  If so, and it is
7083
         to be taken, do so.  */
7084
      if (next_branch->branch == insn)
7085
        {
7086
          enum taken status = next_branch++->status;
7087
          if (status != PATH_NOT_TAKEN)
7088
            {
7089
              if (status == PATH_TAKEN)
7090
                record_jump_equiv (insn, 1);
7091
              else
7092
                invalidate_skipped_block (NEXT_INSN (insn));
7093
 
7094
              /* Set the last insn as the jump insn; it doesn't affect cc0.
7095
                 Then follow this branch.  */
7096
#ifdef HAVE_cc0
7097
              prev_insn_cc0 = 0;
7098
              prev_insn = insn;
7099
#endif
7100
              insn = JUMP_LABEL (insn);
7101
              continue;
7102
            }
7103
        }
7104
 
7105
      if (GET_MODE (insn) == QImode)
7106
        PUT_MODE (insn, VOIDmode);
7107
 
7108
      if (GET_RTX_CLASS (code) == RTX_INSN)
7109
        {
7110
          rtx p;
7111
 
7112
          /* Process notes first so we have all notes in canonical forms when
7113
             looking for duplicate operations.  */
7114
 
7115
          if (REG_NOTES (insn))
7116
            REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7117
 
7118
          /* Track when we are inside in LIBCALL block.  Inside such a block,
7119
             we do not want to record destinations.  The last insn of a
7120
             LIBCALL block is not considered to be part of the block, since
7121
             its destination is the result of the block and hence should be
7122
             recorded.  */
7123
 
7124
          if (REG_NOTES (insn) != 0)
7125
            {
7126
              if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7127
                libcall_insn = XEXP (p, 0);
7128
              else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7129
                {
7130
                  /* Keep libcall_insn for the last SET insn of a no-conflict
7131
                     block to prevent changing the destination.  */
7132
                  if (! no_conflict)
7133
                    libcall_insn = 0;
7134
                  else
7135
                    no_conflict = -1;
7136
                }
7137
              else if (find_reg_note (insn, REG_NO_CONFLICT, NULL_RTX))
7138
                no_conflict = 1;
7139
            }
7140
 
7141
          cse_insn (insn, libcall_insn);
7142
 
7143
          if (no_conflict == -1)
7144
            {
7145
              libcall_insn = 0;
7146
              no_conflict = 0;
7147
            }
7148
 
7149
          /* If we haven't already found an insn where we added a LABEL_REF,
7150
             check this one.  */
7151
          if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
7152
              && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7153
                               (void *) insn))
7154
            recorded_label_ref = 1;
7155
        }
7156
 
7157
      /* If INSN is now an unconditional jump, skip to the end of our
7158
         basic block by pretending that we just did the last insn in the
7159
         basic block.  If we are jumping to the end of our block, show
7160
         that we can have one usage of TO.  */
7161
 
7162
      if (any_uncondjump_p (insn))
7163
        {
7164
          if (to == 0)
7165
            {
7166
              free (qty_table);
7167
              return 0;
7168
            }
7169
 
7170
          if (JUMP_LABEL (insn) == to)
7171
            to_usage = 1;
7172
 
7173
          /* Maybe TO was deleted because the jump is unconditional.
7174
             If so, there is nothing left in this basic block.  */
7175
          /* ??? Perhaps it would be smarter to set TO
7176
             to whatever follows this insn,
7177
             and pretend the basic block had always ended here.  */
7178
          if (INSN_DELETED_P (to))
7179
            break;
7180
 
7181
          insn = PREV_INSN (to);
7182
        }
7183
 
7184
      /* See if it is ok to keep on going past the label
7185
         which used to end our basic block.  Remember that we incremented
7186
         the count of that label, so we decrement it here.  If we made
7187
         a jump unconditional, TO_USAGE will be one; in that case, we don't
7188
         want to count the use in that jump.  */
7189
 
7190
      if (to != 0 && NEXT_INSN (insn) == to
7191
          && LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
7192
        {
7193
          struct cse_basic_block_data val;
7194
          rtx prev;
7195
 
7196
          insn = NEXT_INSN (to);
7197
 
7198
          /* If TO was the last insn in the function, we are done.  */
7199
          if (insn == 0)
7200
            {
7201
              free (qty_table);
7202
              return 0;
7203
            }
7204
 
7205
          /* If TO was preceded by a BARRIER we are done with this block
7206
             because it has no continuation.  */
7207
          prev = prev_nonnote_insn (to);
7208
          if (prev && BARRIER_P (prev))
7209
            {
7210
              free (qty_table);
7211
              return insn;
7212
            }
7213
 
7214
          /* Find the end of the following block.  Note that we won't be
7215
             following branches in this case.  */
7216
          to_usage = 0;
7217
          val.path_size = 0;
7218
          val.path = XNEWVEC (struct branch_path, PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH));
7219
          cse_end_of_basic_block (insn, &val, 0, 0);
7220
          free (val.path);
7221
 
7222
          /* If the tables we allocated have enough space left
7223
             to handle all the SETs in the next basic block,
7224
             continue through it.  Otherwise, return,
7225
             and that block will be scanned individually.  */
7226
          if (val.nsets * 2 + next_qty > max_qty)
7227
            break;
7228
 
7229
          cse_basic_block_start = val.low_cuid;
7230
          cse_basic_block_end = val.high_cuid;
7231
          to = val.last;
7232
 
7233
          /* Prevent TO from being deleted if it is a label.  */
7234
          if (to != 0 && LABEL_P (to))
7235
            ++LABEL_NUSES (to);
7236
 
7237
          /* Back up so we process the first insn in the extension.  */
7238
          insn = PREV_INSN (insn);
7239
        }
7240
    }
7241
 
7242
  gcc_assert (next_qty <= max_qty);
7243
 
7244
  free (qty_table);
7245
 
7246
  return to ? NEXT_INSN (to) : 0;
7247
}
7248
 
7249
/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7250
   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */
7251
 
7252
static int
7253
check_for_label_ref (rtx *rtl, void *data)
7254
{
7255
  rtx insn = (rtx) data;
7256
 
7257
  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7258
     we must rerun jump since it needs to place the note.  If this is a
7259
     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7260
     since no REG_LABEL will be added.  */
7261
  return (GET_CODE (*rtl) == LABEL_REF
7262
          && ! LABEL_REF_NONLOCAL_P (*rtl)
7263
          && LABEL_P (XEXP (*rtl, 0))
7264
          && INSN_UID (XEXP (*rtl, 0)) != 0
7265
          && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7266
}
7267
 
7268
/* Count the number of times registers are used (not set) in X.
7269
   COUNTS is an array in which we accumulate the count, INCR is how much
7270
   we count each register usage.
7271
 
7272
   Don't count a usage of DEST, which is the SET_DEST of a SET which
7273
   contains X in its SET_SRC.  This is because such a SET does not
7274
   modify the liveness of DEST.
7275
   DEST is set to pc_rtx for a trapping insn, which means that we must count
7276
   uses of a SET_DEST regardless because the insn can't be deleted here.  */
7277
 
7278
static void
7279
count_reg_usage (rtx x, int *counts, rtx dest, int incr)
7280
{
7281
  enum rtx_code code;
7282
  rtx note;
7283
  const char *fmt;
7284
  int i, j;
7285
 
7286
  if (x == 0)
7287
    return;
7288
 
7289
  switch (code = GET_CODE (x))
7290
    {
7291
    case REG:
7292
      if (x != dest)
7293
        counts[REGNO (x)] += incr;
7294
      return;
7295
 
7296
    case PC:
7297
    case CC0:
7298
    case CONST:
7299
    case CONST_INT:
7300
    case CONST_DOUBLE:
7301
    case CONST_VECTOR:
7302
    case SYMBOL_REF:
7303
    case LABEL_REF:
7304
      return;
7305
 
7306
    case CLOBBER:
7307
      /* If we are clobbering a MEM, mark any registers inside the address
7308
         as being used.  */
7309
      if (MEM_P (XEXP (x, 0)))
7310
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7311
      return;
7312
 
7313
    case SET:
7314
      /* Unless we are setting a REG, count everything in SET_DEST.  */
7315
      if (!REG_P (SET_DEST (x)))
7316
        count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7317
      count_reg_usage (SET_SRC (x), counts,
7318
                       dest ? dest : SET_DEST (x),
7319
                       incr);
7320
      return;
7321
 
7322
    case CALL_INSN:
7323
    case INSN:
7324
    case JUMP_INSN:
7325
    /* We expect dest to be NULL_RTX here.  If the insn may trap, mark
7326
       this fact by setting DEST to pc_rtx.  */
7327
      if (flag_non_call_exceptions && may_trap_p (PATTERN (x)))
7328
        dest = pc_rtx;
7329
      if (code == CALL_INSN)
7330
        count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, dest, incr);
7331
      count_reg_usage (PATTERN (x), counts, dest, incr);
7332
 
7333
      /* Things used in a REG_EQUAL note aren't dead since loop may try to
7334
         use them.  */
7335
 
7336
      note = find_reg_equal_equiv_note (x);
7337
      if (note)
7338
        {
7339
          rtx eqv = XEXP (note, 0);
7340
 
7341
          if (GET_CODE (eqv) == EXPR_LIST)
7342
          /* This REG_EQUAL note describes the result of a function call.
7343
             Process all the arguments.  */
7344
            do
7345
              {
7346
                count_reg_usage (XEXP (eqv, 0), counts, dest, incr);
7347
                eqv = XEXP (eqv, 1);
7348
              }
7349
            while (eqv && GET_CODE (eqv) == EXPR_LIST);
7350
          else
7351
            count_reg_usage (eqv, counts, dest, incr);
7352
        }
7353
      return;
7354
 
7355
    case EXPR_LIST:
7356
      if (REG_NOTE_KIND (x) == REG_EQUAL
7357
          || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
7358
          /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
7359
             involving registers in the address.  */
7360
          || GET_CODE (XEXP (x, 0)) == CLOBBER)
7361
        count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
7362
 
7363
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
7364
      return;
7365
 
7366
    case ASM_OPERANDS:
7367
      /* If the asm is volatile, then this insn cannot be deleted,
7368
         and so the inputs *must* be live.  */
7369
      if (MEM_VOLATILE_P (x))
7370
        dest = NULL_RTX;
7371
      /* Iterate over just the inputs, not the constraints as well.  */
7372
      for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
7373
        count_reg_usage (ASM_OPERANDS_INPUT (x, i), counts, dest, incr);
7374
      return;
7375
 
7376
    case INSN_LIST:
7377
      gcc_unreachable ();
7378
 
7379
    default:
7380
      break;
7381
    }
7382
 
7383
  fmt = GET_RTX_FORMAT (code);
7384
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7385
    {
7386
      if (fmt[i] == 'e')
7387
        count_reg_usage (XEXP (x, i), counts, dest, incr);
7388
      else if (fmt[i] == 'E')
7389
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7390
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7391
    }
7392
}
7393
 
7394
/* Return true if set is live.  */
7395
static bool
7396
set_live_p (rtx set, rtx insn ATTRIBUTE_UNUSED, /* Only used with HAVE_cc0.  */
7397
            int *counts)
7398
{
7399
#ifdef HAVE_cc0
7400
  rtx tem;
7401
#endif
7402
 
7403
  if (set_noop_p (set))
7404
    ;
7405
 
7406
#ifdef HAVE_cc0
7407
  else if (GET_CODE (SET_DEST (set)) == CC0
7408
           && !side_effects_p (SET_SRC (set))
7409
           && ((tem = next_nonnote_insn (insn)) == 0
7410
               || !INSN_P (tem)
7411
               || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7412
    return false;
7413
#endif
7414
  else if (!REG_P (SET_DEST (set))
7415
           || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7416
           || counts[REGNO (SET_DEST (set))] != 0
7417
           || side_effects_p (SET_SRC (set)))
7418
    return true;
7419
  return false;
7420
}
7421
 
7422
/* Return true if insn is live.  */
7423
 
7424
static bool
7425
insn_live_p (rtx insn, int *counts)
7426
{
7427
  int i;
7428
  if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7429
    return true;
7430
  else if (GET_CODE (PATTERN (insn)) == SET)
7431
    return set_live_p (PATTERN (insn), insn, counts);
7432
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7433
    {
7434
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7435
        {
7436
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
7437
 
7438
          if (GET_CODE (elt) == SET)
7439
            {
7440
              if (set_live_p (elt, insn, counts))
7441
                return true;
7442
            }
7443
          else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7444
            return true;
7445
        }
7446
      return false;
7447
    }
7448
  else
7449
    return true;
7450
}
7451
 
7452
/* Return true if libcall is dead as a whole.  */
7453
 
7454
static bool
7455
dead_libcall_p (rtx insn, int *counts)
7456
{
7457
  rtx note, set, new;
7458
 
7459
  /* See if there's a REG_EQUAL note on this insn and try to
7460
     replace the source with the REG_EQUAL expression.
7461
 
7462
     We assume that insns with REG_RETVALs can only be reg->reg
7463
     copies at this point.  */
7464
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7465
  if (!note)
7466
    return false;
7467
 
7468
  set = single_set (insn);
7469
  if (!set)
7470
    return false;
7471
 
7472
  new = simplify_rtx (XEXP (note, 0));
7473
  if (!new)
7474
    new = XEXP (note, 0);
7475
 
7476
  /* While changing insn, we must update the counts accordingly.  */
7477
  count_reg_usage (insn, counts, NULL_RTX, -1);
7478
 
7479
  if (validate_change (insn, &SET_SRC (set), new, 0))
7480
    {
7481
      count_reg_usage (insn, counts, NULL_RTX, 1);
7482
      remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7483
      remove_note (insn, note);
7484
      return true;
7485
    }
7486
 
7487
  if (CONSTANT_P (new))
7488
    {
7489
      new = force_const_mem (GET_MODE (SET_DEST (set)), new);
7490
      if (new && validate_change (insn, &SET_SRC (set), new, 0))
7491
        {
7492
          count_reg_usage (insn, counts, NULL_RTX, 1);
7493
          remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7494
          remove_note (insn, note);
7495
          return true;
7496
        }
7497
    }
7498
 
7499
  count_reg_usage (insn, counts, NULL_RTX, 1);
7500
  return false;
7501
}
7502
 
7503
/* Scan all the insns and delete any that are dead; i.e., they store a register
7504
   that is never used or they copy a register to itself.
7505
 
7506
   This is used to remove insns made obviously dead by cse, loop or other
7507
   optimizations.  It improves the heuristics in loop since it won't try to
7508
   move dead invariants out of loops or make givs for dead quantities.  The
7509
   remaining passes of the compilation are also sped up.  */
7510
 
7511
int
7512
delete_trivially_dead_insns (rtx insns, int nreg)
7513
{
7514
  int *counts;
7515
  rtx insn, prev;
7516
  int in_libcall = 0, dead_libcall = 0;
7517
  int ndead = 0;
7518
 
7519
  timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7520
  /* First count the number of times each register is used.  */
7521
  counts = XCNEWVEC (int, nreg);
7522
  for (insn = insns; insn; insn = NEXT_INSN (insn))
7523
    if (INSN_P (insn))
7524
      count_reg_usage (insn, counts, NULL_RTX, 1);
7525
 
7526
  /* Go from the last insn to the first and delete insns that only set unused
7527
     registers or copy a register to itself.  As we delete an insn, remove
7528
     usage counts for registers it uses.
7529
 
7530
     The first jump optimization pass may leave a real insn as the last
7531
     insn in the function.   We must not skip that insn or we may end
7532
     up deleting code that is not really dead.  */
7533
  for (insn = get_last_insn (); insn; insn = prev)
7534
    {
7535
      int live_insn = 0;
7536
 
7537
      prev = PREV_INSN (insn);
7538
      if (!INSN_P (insn))
7539
        continue;
7540
 
7541
      /* Don't delete any insns that are part of a libcall block unless
7542
         we can delete the whole libcall block.
7543
 
7544
         Flow or loop might get confused if we did that.  Remember
7545
         that we are scanning backwards.  */
7546
      if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7547
        {
7548
          in_libcall = 1;
7549
          live_insn = 1;
7550
          dead_libcall = dead_libcall_p (insn, counts);
7551
        }
7552
      else if (in_libcall)
7553
        live_insn = ! dead_libcall;
7554
      else
7555
        live_insn = insn_live_p (insn, counts);
7556
 
7557
      /* If this is a dead insn, delete it and show registers in it aren't
7558
         being used.  */
7559
 
7560
      if (! live_insn)
7561
        {
7562
          count_reg_usage (insn, counts, NULL_RTX, -1);
7563
          delete_insn_and_edges (insn);
7564
          ndead++;
7565
        }
7566
 
7567
      if (in_libcall && find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7568
        {
7569
          in_libcall = 0;
7570
          dead_libcall = 0;
7571
        }
7572
    }
7573
 
7574
  if (dump_file && ndead)
7575
    fprintf (dump_file, "Deleted %i trivially dead insns\n",
7576
             ndead);
7577
  /* Clean up.  */
7578
  free (counts);
7579
  timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7580
  return ndead;
7581
}
7582
 
7583
/* This function is called via for_each_rtx.  The argument, NEWREG, is
7584
   a condition code register with the desired mode.  If we are looking
7585
   at the same register in a different mode, replace it with
7586
   NEWREG.  */
7587
 
7588
static int
7589
cse_change_cc_mode (rtx *loc, void *data)
7590
{
7591
  struct change_cc_mode_args* args = (struct change_cc_mode_args*)data;
7592
 
7593
  if (*loc
7594
      && REG_P (*loc)
7595
      && REGNO (*loc) == REGNO (args->newreg)
7596
      && GET_MODE (*loc) != GET_MODE (args->newreg))
7597
    {
7598
      validate_change (args->insn, loc, args->newreg, 1);
7599
 
7600
      return -1;
7601
    }
7602
  return 0;
7603
}
7604
 
7605
/* Change the mode of any reference to the register REGNO (NEWREG) to
7606
   GET_MODE (NEWREG) in INSN.  */
7607
 
7608
static void
7609
cse_change_cc_mode_insn (rtx insn, rtx newreg)
7610
{
7611
  struct change_cc_mode_args args;
7612
  int success;
7613
 
7614
  if (!INSN_P (insn))
7615
    return;
7616
 
7617
  args.insn = insn;
7618
  args.newreg = newreg;
7619
 
7620
  for_each_rtx (&PATTERN (insn), cse_change_cc_mode, &args);
7621
  for_each_rtx (&REG_NOTES (insn), cse_change_cc_mode, &args);
7622
 
7623
  /* If the following assertion was triggered, there is most probably
7624
     something wrong with the cc_modes_compatible back end function.
7625
     CC modes only can be considered compatible if the insn - with the mode
7626
     replaced by any of the compatible modes - can still be recognized.  */
7627
  success = apply_change_group ();
7628
  gcc_assert (success);
7629
}
7630
 
7631
/* Change the mode of any reference to the register REGNO (NEWREG) to
7632
   GET_MODE (NEWREG), starting at START.  Stop before END.  Stop at
7633
   any instruction which modifies NEWREG.  */
7634
 
7635
static void
7636
cse_change_cc_mode_insns (rtx start, rtx end, rtx newreg)
7637
{
7638
  rtx insn;
7639
 
7640
  for (insn = start; insn != end; insn = NEXT_INSN (insn))
7641
    {
7642
      if (! INSN_P (insn))
7643
        continue;
7644
 
7645
      if (reg_set_p (newreg, insn))
7646
        return;
7647
 
7648
      cse_change_cc_mode_insn (insn, newreg);
7649
    }
7650
}
7651
 
7652
/* BB is a basic block which finishes with CC_REG as a condition code
7653
   register which is set to CC_SRC.  Look through the successors of BB
7654
   to find blocks which have a single predecessor (i.e., this one),
7655
   and look through those blocks for an assignment to CC_REG which is
7656
   equivalent to CC_SRC.  CAN_CHANGE_MODE indicates whether we are
7657
   permitted to change the mode of CC_SRC to a compatible mode.  This
7658
   returns VOIDmode if no equivalent assignments were found.
7659
   Otherwise it returns the mode which CC_SRC should wind up with.
7660
 
7661
   The main complexity in this function is handling the mode issues.
7662
   We may have more than one duplicate which we can eliminate, and we
7663
   try to find a mode which will work for multiple duplicates.  */
7664
 
7665
static enum machine_mode
7666
cse_cc_succs (basic_block bb, rtx cc_reg, rtx cc_src, bool can_change_mode)
7667
{
7668
  bool found_equiv;
7669
  enum machine_mode mode;
7670
  unsigned int insn_count;
7671
  edge e;
7672
  rtx insns[2];
7673
  enum machine_mode modes[2];
7674
  rtx last_insns[2];
7675
  unsigned int i;
7676
  rtx newreg;
7677
  edge_iterator ei;
7678
 
7679
  /* We expect to have two successors.  Look at both before picking
7680
     the final mode for the comparison.  If we have more successors
7681
     (i.e., some sort of table jump, although that seems unlikely),
7682
     then we require all beyond the first two to use the same
7683
     mode.  */
7684
 
7685
  found_equiv = false;
7686
  mode = GET_MODE (cc_src);
7687
  insn_count = 0;
7688
  FOR_EACH_EDGE (e, ei, bb->succs)
7689
    {
7690
      rtx insn;
7691
      rtx end;
7692
 
7693
      if (e->flags & EDGE_COMPLEX)
7694
        continue;
7695
 
7696
      if (EDGE_COUNT (e->dest->preds) != 1
7697
          || e->dest == EXIT_BLOCK_PTR)
7698
        continue;
7699
 
7700
      end = NEXT_INSN (BB_END (e->dest));
7701
      for (insn = BB_HEAD (e->dest); insn != end; insn = NEXT_INSN (insn))
7702
        {
7703
          rtx set;
7704
 
7705
          if (! INSN_P (insn))
7706
            continue;
7707
 
7708
          /* If CC_SRC is modified, we have to stop looking for
7709
             something which uses it.  */
7710
          if (modified_in_p (cc_src, insn))
7711
            break;
7712
 
7713
          /* Check whether INSN sets CC_REG to CC_SRC.  */
7714
          set = single_set (insn);
7715
          if (set
7716
              && REG_P (SET_DEST (set))
7717
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7718
            {
7719
              bool found;
7720
              enum machine_mode set_mode;
7721
              enum machine_mode comp_mode;
7722
 
7723
              found = false;
7724
              set_mode = GET_MODE (SET_SRC (set));
7725
              comp_mode = set_mode;
7726
              if (rtx_equal_p (cc_src, SET_SRC (set)))
7727
                found = true;
7728
              else if (GET_CODE (cc_src) == COMPARE
7729
                       && GET_CODE (SET_SRC (set)) == COMPARE
7730
                       && mode != set_mode
7731
                       && rtx_equal_p (XEXP (cc_src, 0),
7732
                                       XEXP (SET_SRC (set), 0))
7733
                       && rtx_equal_p (XEXP (cc_src, 1),
7734
                                       XEXP (SET_SRC (set), 1)))
7735
 
7736
                {
7737
                  comp_mode = targetm.cc_modes_compatible (mode, set_mode);
7738
                  if (comp_mode != VOIDmode
7739
                      && (can_change_mode || comp_mode == mode))
7740
                    found = true;
7741
                }
7742
 
7743
              if (found)
7744
                {
7745
                  found_equiv = true;
7746
                  if (insn_count < ARRAY_SIZE (insns))
7747
                    {
7748
                      insns[insn_count] = insn;
7749
                      modes[insn_count] = set_mode;
7750
                      last_insns[insn_count] = end;
7751
                      ++insn_count;
7752
 
7753
                      if (mode != comp_mode)
7754
                        {
7755
                          gcc_assert (can_change_mode);
7756
                          mode = comp_mode;
7757
 
7758
                          /* The modified insn will be re-recognized later.  */
7759
                          PUT_MODE (cc_src, mode);
7760
                        }
7761
                    }
7762
                  else
7763
                    {
7764
                      if (set_mode != mode)
7765
                        {
7766
                          /* We found a matching expression in the
7767
                             wrong mode, but we don't have room to
7768
                             store it in the array.  Punt.  This case
7769
                             should be rare.  */
7770
                          break;
7771
                        }
7772
                      /* INSN sets CC_REG to a value equal to CC_SRC
7773
                         with the right mode.  We can simply delete
7774
                         it.  */
7775
                      delete_insn (insn);
7776
                    }
7777
 
7778
                  /* We found an instruction to delete.  Keep looking,
7779
                     in the hopes of finding a three-way jump.  */
7780
                  continue;
7781
                }
7782
 
7783
              /* We found an instruction which sets the condition
7784
                 code, so don't look any farther.  */
7785
              break;
7786
            }
7787
 
7788
          /* If INSN sets CC_REG in some other way, don't look any
7789
             farther.  */
7790
          if (reg_set_p (cc_reg, insn))
7791
            break;
7792
        }
7793
 
7794
      /* If we fell off the bottom of the block, we can keep looking
7795
         through successors.  We pass CAN_CHANGE_MODE as false because
7796
         we aren't prepared to handle compatibility between the
7797
         further blocks and this block.  */
7798
      if (insn == end)
7799
        {
7800
          enum machine_mode submode;
7801
 
7802
          submode = cse_cc_succs (e->dest, cc_reg, cc_src, false);
7803
          if (submode != VOIDmode)
7804
            {
7805
              gcc_assert (submode == mode);
7806
              found_equiv = true;
7807
              can_change_mode = false;
7808
            }
7809
        }
7810
    }
7811
 
7812
  if (! found_equiv)
7813
    return VOIDmode;
7814
 
7815
  /* Now INSN_COUNT is the number of instructions we found which set
7816
     CC_REG to a value equivalent to CC_SRC.  The instructions are in
7817
     INSNS.  The modes used by those instructions are in MODES.  */
7818
 
7819
  newreg = NULL_RTX;
7820
  for (i = 0; i < insn_count; ++i)
7821
    {
7822
      if (modes[i] != mode)
7823
        {
7824
          /* We need to change the mode of CC_REG in INSNS[i] and
7825
             subsequent instructions.  */
7826
          if (! newreg)
7827
            {
7828
              if (GET_MODE (cc_reg) == mode)
7829
                newreg = cc_reg;
7830
              else
7831
                newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7832
            }
7833
          cse_change_cc_mode_insns (NEXT_INSN (insns[i]), last_insns[i],
7834
                                    newreg);
7835
        }
7836
 
7837
      delete_insn (insns[i]);
7838
    }
7839
 
7840
  return mode;
7841
}
7842
 
7843
/* If we have a fixed condition code register (or two), walk through
7844
   the instructions and try to eliminate duplicate assignments.  */
7845
 
7846
static void
7847
cse_condition_code_reg (void)
7848
{
7849
  unsigned int cc_regno_1;
7850
  unsigned int cc_regno_2;
7851
  rtx cc_reg_1;
7852
  rtx cc_reg_2;
7853
  basic_block bb;
7854
 
7855
  if (! targetm.fixed_condition_code_regs (&cc_regno_1, &cc_regno_2))
7856
    return;
7857
 
7858
  cc_reg_1 = gen_rtx_REG (CCmode, cc_regno_1);
7859
  if (cc_regno_2 != INVALID_REGNUM)
7860
    cc_reg_2 = gen_rtx_REG (CCmode, cc_regno_2);
7861
  else
7862
    cc_reg_2 = NULL_RTX;
7863
 
7864
  FOR_EACH_BB (bb)
7865
    {
7866
      rtx last_insn;
7867
      rtx cc_reg;
7868
      rtx insn;
7869
      rtx cc_src_insn;
7870
      rtx cc_src;
7871
      enum machine_mode mode;
7872
      enum machine_mode orig_mode;
7873
 
7874
      /* Look for blocks which end with a conditional jump based on a
7875
         condition code register.  Then look for the instruction which
7876
         sets the condition code register.  Then look through the
7877
         successor blocks for instructions which set the condition
7878
         code register to the same value.  There are other possible
7879
         uses of the condition code register, but these are by far the
7880
         most common and the ones which we are most likely to be able
7881
         to optimize.  */
7882
 
7883
      last_insn = BB_END (bb);
7884
      if (!JUMP_P (last_insn))
7885
        continue;
7886
 
7887
      if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))
7888
        cc_reg = cc_reg_1;
7889
      else if (cc_reg_2 && reg_referenced_p (cc_reg_2, PATTERN (last_insn)))
7890
        cc_reg = cc_reg_2;
7891
      else
7892
        continue;
7893
 
7894
      cc_src_insn = NULL_RTX;
7895
      cc_src = NULL_RTX;
7896
      for (insn = PREV_INSN (last_insn);
7897
           insn && insn != PREV_INSN (BB_HEAD (bb));
7898
           insn = PREV_INSN (insn))
7899
        {
7900
          rtx set;
7901
 
7902
          if (! INSN_P (insn))
7903
            continue;
7904
          set = single_set (insn);
7905
          if (set
7906
              && REG_P (SET_DEST (set))
7907
              && REGNO (SET_DEST (set)) == REGNO (cc_reg))
7908
            {
7909
              cc_src_insn = insn;
7910
              cc_src = SET_SRC (set);
7911
              break;
7912
            }
7913
          else if (reg_set_p (cc_reg, insn))
7914
            break;
7915
        }
7916
 
7917
      if (! cc_src_insn)
7918
        continue;
7919
 
7920
      if (modified_between_p (cc_src, cc_src_insn, NEXT_INSN (last_insn)))
7921
        continue;
7922
 
7923
      /* Now CC_REG is a condition code register used for a
7924
         conditional jump at the end of the block, and CC_SRC, in
7925
         CC_SRC_INSN, is the value to which that condition code
7926
         register is set, and CC_SRC is still meaningful at the end of
7927
         the basic block.  */
7928
 
7929
      orig_mode = GET_MODE (cc_src);
7930
      mode = cse_cc_succs (bb, cc_reg, cc_src, true);
7931
      if (mode != VOIDmode)
7932
        {
7933
          gcc_assert (mode == GET_MODE (cc_src));
7934
          if (mode != orig_mode)
7935
            {
7936
              rtx newreg = gen_rtx_REG (mode, REGNO (cc_reg));
7937
 
7938
              cse_change_cc_mode_insn (cc_src_insn, newreg);
7939
 
7940
              /* Do the same in the following insns that use the
7941
                 current value of CC_REG within BB.  */
7942
              cse_change_cc_mode_insns (NEXT_INSN (cc_src_insn),
7943
                                        NEXT_INSN (last_insn),
7944
                                        newreg);
7945
            }
7946
        }
7947
    }
7948
}
7949
 
7950
 
7951
/* Perform common subexpression elimination.  Nonzero value from
7952
   `cse_main' means that jumps were simplified and some code may now
7953
   be unreachable, so do jump optimization again.  */
7954
static bool
7955
gate_handle_cse (void)
7956
{
7957
  return optimize > 0;
7958
}
7959
 
7960
static unsigned int
7961
rest_of_handle_cse (void)
7962
{
7963
  int tem;
7964
 
7965
  if (dump_file)
7966
    dump_flow_info (dump_file, dump_flags);
7967
 
7968
  reg_scan (get_insns (), max_reg_num ());
7969
 
7970
  tem = cse_main (get_insns (), max_reg_num ());
7971
  if (tem)
7972
    rebuild_jump_labels (get_insns ());
7973
  if (purge_all_dead_edges ())
7974
    delete_unreachable_blocks ();
7975
 
7976
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
7977
 
7978
  /* If we are not running more CSE passes, then we are no longer
7979
     expecting CSE to be run.  But always rerun it in a cheap mode.  */
7980
  cse_not_expected = !flag_rerun_cse_after_loop && !flag_gcse;
7981
 
7982
  if (tem)
7983
    delete_dead_jumptables ();
7984
 
7985
  if (tem || optimize > 1)
7986
    cleanup_cfg (CLEANUP_EXPENSIVE);
7987
  return 0;
7988
}
7989
 
7990
struct tree_opt_pass pass_cse =
7991
{
7992
  "cse1",                               /* name */
7993
  gate_handle_cse,                      /* gate */
7994
  rest_of_handle_cse,                   /* execute */
7995
  NULL,                                 /* sub */
7996
  NULL,                                 /* next */
7997
  0,                                    /* static_pass_number */
7998
  TV_CSE,                               /* tv_id */
7999
  0,                                    /* properties_required */
8000
  0,                                    /* properties_provided */
8001
  0,                                    /* properties_destroyed */
8002
  0,                                    /* todo_flags_start */
8003
  TODO_dump_func |
8004
  TODO_ggc_collect,                     /* todo_flags_finish */
8005
  's'                                   /* letter */
8006
};
8007
 
8008
 
8009
static bool
8010
gate_handle_cse2 (void)
8011
{
8012
  return optimize > 0 && flag_rerun_cse_after_loop;
8013
}
8014
 
8015
/* Run second CSE pass after loop optimizations.  */
8016
static unsigned int
8017
rest_of_handle_cse2 (void)
8018
{
8019
  int tem;
8020
 
8021
  if (dump_file)
8022
    dump_flow_info (dump_file, dump_flags);
8023
 
8024
  tem = cse_main (get_insns (), max_reg_num ());
8025
 
8026
  /* Run a pass to eliminate duplicated assignments to condition code
8027
     registers.  We have to run this after bypass_jumps, because it
8028
     makes it harder for that pass to determine whether a jump can be
8029
     bypassed safely.  */
8030
  cse_condition_code_reg ();
8031
 
8032
  purge_all_dead_edges ();
8033
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
8034
 
8035
  if (tem)
8036
    {
8037
      timevar_push (TV_JUMP);
8038
      rebuild_jump_labels (get_insns ());
8039
      delete_dead_jumptables ();
8040
      cleanup_cfg (CLEANUP_EXPENSIVE);
8041
      timevar_pop (TV_JUMP);
8042
    }
8043
  reg_scan (get_insns (), max_reg_num ());
8044
  cse_not_expected = 1;
8045
  return 0;
8046
}
8047
 
8048
 
8049
struct tree_opt_pass pass_cse2 =
8050
{
8051
  "cse2",                               /* name */
8052
  gate_handle_cse2,                     /* gate */
8053
  rest_of_handle_cse2,                  /* execute */
8054
  NULL,                                 /* sub */
8055
  NULL,                                 /* next */
8056
  0,                                    /* static_pass_number */
8057
  TV_CSE2,                              /* tv_id */
8058
  0,                                    /* properties_required */
8059
  0,                                    /* properties_provided */
8060
  0,                                    /* properties_destroyed */
8061
  0,                                    /* todo_flags_start */
8062
  TODO_dump_func |
8063
  TODO_ggc_collect,                     /* todo_flags_finish */
8064
  't'                                   /* letter */
8065
};
8066
 

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.