/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

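/* For illustration (this example is not part of the original comment):
   given the linked pair

        I2: (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
        I3: (set (mem:SI (reg:SI 100)) (const_int 0))

   combine substitutes I2's PLUS expression for reg 100 inside I3, giving

        (set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

   and installs the result in place of both insns if the machine
   description recognizes it.  */
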
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;


/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)

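/* (Illustrative note, not from the original sources: when BITS_PER_WORD
   and HOST_BITS_PER_WIDE_INT are both 64, `val << 64' is undefined in C,
   whereas shifting by 63 and then by 1 is well defined and produces the
   intended result of zero.)  */
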
/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

struct reg_stat {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx                           last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx                           last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx                           last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int                           last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int                           last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT        last_set_nonzero_bits;
  char                          last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode)   last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char                          last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char                 sign_bit_copies;

  unsigned HOST_WIDE_INT        nonzero_bits;
};

static struct reg_stat *reg_stat;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* Length of the currently allocated uid_insn_cost array.  */

static int last_insn_cost;

/* Incremented for each label.  */

static int label_tick;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;


/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone.  undos chains the changes
   currently recorded; frees holds undo structures available for reuse.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

 
341
struct undobuf
342
{
343
  struct undo *undos;
344
  struct undo *frees;
345
  rtx other_insn;
346
};
347
 
348
static struct undobuf undobuf;
349
 
350
/* Number of times the pseudo being substituted for
351
   was found and replaced.  */
352
 
353
static int n_occurrences;
354
 
355
static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx,
356
                                         enum machine_mode,
357
                                         unsigned HOST_WIDE_INT,
358
                                         unsigned HOST_WIDE_INT *);
359
static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx,
360
                                                enum machine_mode,
361
                                                unsigned int, unsigned int *);
362
static void do_SUBST (rtx *, rtx);
363
static void do_SUBST_INT (int *, int);
364
static void init_reg_last (void);
365
static void setup_incoming_promotions (void);
366
static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
367
static int cant_combine_insn_p (rtx);
368
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
369
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
370
static int contains_muldiv (rtx);
371
static rtx try_combine (rtx, rtx, rtx, int *);
372
static void undo_all (void);
373
static void undo_commit (void);
374
static rtx *find_split_point (rtx *, rtx);
375
static rtx subst (rtx, rtx, rtx, int, int);
376
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
377
static rtx simplify_if_then_else (rtx);
378
static rtx simplify_set (rtx);
379
static rtx simplify_logical (rtx);
380
static rtx expand_compound_operation (rtx);
381
static rtx expand_field_assignment (rtx);
382
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
383
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
384
static rtx extract_left_shift (rtx, int);
385
static rtx make_compound_operation (rtx, enum rtx_code);
386
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
387
                              unsigned HOST_WIDE_INT *);
388
static rtx force_to_mode (rtx, enum machine_mode,
389
                          unsigned HOST_WIDE_INT, rtx, int);
390
static rtx if_then_else_cond (rtx, rtx *, rtx *);
391
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
392
static int rtx_equal_for_field_assignment_p (rtx, rtx);
393
static rtx make_field_assignment (rtx);
394
static rtx apply_distributive_law (rtx);
395
static rtx distribute_and_simplify_rtx (rtx, int);
396
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
397
                                   unsigned HOST_WIDE_INT);
398
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
399
                            HOST_WIDE_INT, enum machine_mode, int *);
400
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
401
                                 int);
402
static int recog_for_combine (rtx *, rtx, rtx *);
403
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
404
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
405
static void update_table_tick (rtx);
406
static void record_value_for_reg (rtx, rtx, rtx);
407
static void check_promoted_subreg (rtx, rtx);
408
static void record_dead_and_set_regs_1 (rtx, rtx, void *);
409
static void record_dead_and_set_regs (rtx);
410
static int get_last_value_validate (rtx *, rtx, int, int);
411
static rtx get_last_value (rtx);
412
static int use_crosses_set_p (rtx, int);
413
static void reg_dead_at_p_1 (rtx, rtx, void *);
414
static int reg_dead_at_p (rtx, rtx);
415
static void move_deaths (rtx, rtx, int, rtx, rtx *);
416
static int reg_bitfield_target_p (rtx, rtx);
417
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
418
static void distribute_links (rtx);
419
static void mark_used_regs_combine (rtx);
420
static int insn_cuid (rtx);
421
static void record_promoted_value (rtx, rtx);
422
static int unmentioned_reg_p_1 (rtx *, void *);
423
static bool unmentioned_reg_p (rtx, rtx);
424
 
425
 
426
/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;


/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)     do_SUBST(&(INTO), (NEWVAL))

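/* Example usage (illustrative, not from the original sources): a caller
   that tentatively rewrites an operand writes

        SUBST (XEXP (x, 0), new_rtx);

   which records the old operand in undobuf.undos; if the combination is
   later abandoned, undo_all walks that list and restores every *where
   from its old_contents.  */
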
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT (including a CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
   than the original instruction sequence I1, I2 and I3.  Note that I1
   and/or NEWI2PAT may be NULL_RTX.  This function returns false if the
   costs of all instructions can be estimated and the replacements are
   more expensive than the original sequence.  */

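/* For example (illustrative numbers): if I2 and I3 cost 4 each and the
   single replacement pattern costs 12, old_cost is 8, new_cost is 12,
   and the combination is rejected.  A cost of zero means "unknown" and
   disables the comparison rather than vetoing the combination.  */
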
static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Look up the original insn_rtx_costs.  */
  i2_cost = INSN_UID (i2) <= last_insn_cost
            ? uid_insn_cost[INSN_UID (i2)] : 0;
  i3_cost = INSN_UID (i3) <= last_insn_cost
            ? uid_insn_cost[INSN_UID (i3)] : 0;

  if (i1)
    {
      i1_cost = INSN_UID (i1) <= last_insn_cost
                ? uid_insn_cost[INSN_UID (i1)] : 0;
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
                        ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
      new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  uid_insn_cost[INSN_UID (i2)] = new_i2_cost;
  uid_insn_cost[INSN_UID (i3)] = new_i3_cost;
  if (i1)
    uid_insn_cost[INSN_UID (i1)] = 0;

  return true;
}

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  unsigned int j = 0;
  rtx links, nextlinks;
  sbitmap_iterator sbi;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  rtl_hooks = combine_rtl_hooks;

  reg_stat = xcalloc (nregs, sizeof (struct reg_stat));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  /* Allocate array of current insn_rtx_costs.  */
  uid_insn_cost = xcalloc (max_uid_cuid + 1, sizeof (int));
  last_insn_cost = max_uid_cuid;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
        {
          note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                       NULL);
          record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
            if (REG_NOTE_KIND (links) == REG_INC)
              set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                NULL);
#endif

          /* Record the current insn_rtx_cost of this instruction.  */
          if (NONJUMP_INSN_P (insn))
            uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn));
          if (dump_file)
            fprintf(dump_file, "insn_cost %d: %d\n",
                    INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
        }

      if (LABEL_P (insn))
        label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;

          if (LABEL_P (insn))
            label_tick++;

          else if (INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may already have been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      next = try_combine (insn, temp, NULL_RTX,
                                          &new_direct_jump_p);
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, j, sbi)
    BASIC_BLOCK (j)->flags |= BB_DIRTY;
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
                                    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                                    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (uid_insn_cost);
  free (reg_stat);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
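  /* The memset length is offsetof (struct reg_stat, sign_bit_copies), so
     only the fields up to and excluding sign_bit_copies are cleared;
     sign_bit_copies and nonzero_bits are preserved across passes.  */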
  for (i = 0; i < combine_max_regno; i++)
    memset (reg_stat + i, 0, offsetof (struct reg_stat, sign_bit_copies));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (void)
{
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    {
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        /* Check whether this register can hold an incoming pointer
           argument.  FUNCTION_ARG_REGNO_P tests outgoing register
           numbers, so translate if necessary due to register windows.  */
        if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
            && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
          {
            record_value_for_reg
              (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
                                           : SIGN_EXTEND),
                                          GET_MODE (reg),
                                          gen_rtx_CLOBBER (mode, const0_rtx)));
          }
    }
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

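/* Illustrative example (not from the original comment): after

        (set (reg:QI 110) (const_int 3))

   reg_stat[110].nonzero_bits records 3, so a later
   (and:QI (reg:QI 110) (const_int 3)) can be recognized as redundant.  */
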
static void
set_nonzero_bits_and_sign_copies (rtx x, rtx set,
                                  void *data ATTRIBUTE_UNUSED)
{
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
         (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          reg_stat[REGNO (x)].sign_bit_copies = 1;
          return;
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
              && GET_CODE (src) == CONST_INT
              && INTVAL (src) > 0
              && 0 != (INTVAL (src)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            src = GEN_INT (INTVAL (src)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

          /* Don't call nonzero_bits if it cannot change anything.  */
          if (reg_stat[REGNO (x)].nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
            reg_stat[REGNO (x)].nonzero_bits
              |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (reg_stat[REGNO (x)].sign_bit_copies == 0
              || reg_stat[REGNO (x)].sign_bit_copies > num)
            reg_stat[REGNO (x)].sign_bit_copies = num;
        }
      else
        {
          reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          reg_stat[REGNO (x)].sign_bit_copies = 1;
        }
    }
}

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

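/* (Illustrative, not part of the original comment: for

        I2: (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
        I3: (set (mem:SI (reg:SI 100)) (const_int 0))

   a successful can_combine_p (I2, I3, ...) sets *PDEST to (reg:SI 100)
   and *PSRC to (plus:SI (reg:SI 99) (const_int 4)).)  */
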
static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
               rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          rtx note;

          switch (GET_CODE (elt))
            {
            /* This is important to combine floating point insns
               for the SH4 port.  */
            case USE:
              /* Combining an isolated USE doesn't make sense.
                 We depend here on combinable_i3pat to reject them.  */
              /* The code below this loop only verifies that the inputs of
                 the SET in INSN do not change.  We call reg_set_between_p
                 to verify that the REG in the USE does not change between
                 I3 and INSN.
                 If the USE in INSN was for a pseudo register, the matching
                 insn pattern will likely match any register; combining this
                 with any other USE would only be safe if we knew that the
                 used registers have identical values, or if there was
                 something to tell them apart, e.g. different modes.  For
                 now, we forgo such complicated tests and simply disallow
                 combining of USES of pseudo registers with any other USE.  */
              if (REG_P (XEXP (elt, 0))
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
                {
                  rtx i3pat = PATTERN (i3);
                  int i = XVECLEN (i3pat, 0) - 1;
                  unsigned int regno = REGNO (XEXP (elt, 0));

                  do
                    {
                      rtx i3elt = XVECEXP (i3pat, 0, i);

                      if (GET_CODE (i3elt) == USE
                          && REG_P (XEXP (i3elt, 0))
                          && (REGNO (XEXP (i3elt, 0)) == regno
                              ? reg_set_between_p (XEXP (elt, 0),
                                                   PREV_INSN (insn), i3)
                              : regno >= FIRST_PSEUDO_REGISTER))
                        return 0;
                    }
                  while (--i >= 0);
                }
              break;

              /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
                      || INTVAL (XEXP (note, 0)) <= 0)
                  && ! side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
          && (find_reg_fusage (i3, USE, dest)
              || (REG_P (dest)
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
                  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
         pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
         use REG_RETVAL notes for noconflict blocks, but other code here
         makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
1228
         the same destination; last_set of that register might point to
1229
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
1230
         equivalent to the memory so the substitution is valid even if there
1231
         are intervening stores.  Also, don't move a volatile asm or
1232
         UNSPEC_VOLATILE across any other insns.  */
1233
      || (! all_adjacent
1234
          && (((!MEM_P (src)
1235
                || ! find_reg_note (insn, REG_EQUIV, src))
1236
               && use_crosses_set_p (src, INSN_CUID (insn)))
1237
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1238
              || GET_CODE (src) == UNSPEC_VOLATILE))
1239
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
1240
         better register allocation by not doing the combine.  */
1241
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
1242
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
1243
      /* Don't combine across a CALL_INSN, because that would possibly
1244
         change whether the life span of some REGs crosses calls or not,
1245
         and it is a pain to update that information.
1246
         Exception: if source is a constant, moving it later can't hurt.
1247
         Accept that special case, because it helps -fforce-addr a lot.  */
1248
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
1249
    return 0;
1250
 
1251
  /* DEST must either be a REG or CC0.  */
1252
  if (REG_P (dest))
1253
    {
1254
      /* If register alignment is being enforced for multi-word items in all
1255
         cases except for parameters, it is possible to have a register copy
1256
         insn referencing a hard register that is not allowed to contain the
1257
         mode being copied and which would not be valid as an operand of most
1258
         insns.  Eliminate this problem by not combining with such an insn.
1259
 
1260
         Also, on some machines we don't want to extend the life of a hard
1261
         register.  */
1262
 
1263
      if (REG_P (src)
1264
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1265
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1266
              /* Don't extend the life of a hard register unless it is
1267
                 user variable (if we have few registers) or it can't
1268
                 fit into the desired register (meaning something special
1269
                 is going on).
1270
                 Also avoid substituting a return register into I3, because
1271
                 reload can't handle a conflict with constraints of other
1272
                 inputs.  */
1273
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
1274
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1275
        return 0;
1276
    }
1277
  else if (GET_CODE (dest) != CC0)
1278
    return 0;
1279
 
1280
 
1281
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
1282
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1283
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1284
        {
1285
          /* Don't substitute for a register intended as a clobberable
1286
             operand.  */
1287
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1288
          if (rtx_equal_p (reg, dest))
1289
            return 0;
1290
 
1291
          /* If the clobber represents an earlyclobber operand, we must not
1292
             substitute an expression containing the clobbered register.
1293
             As we do not analyze the constraint strings here, we have to
1294
             make the conservative assumption.  However, if the register is
1295
             a fixed hard reg, the clobber cannot represent any operand;
1296
             we leave it up to the machine description to either accept or
1297
             reject use-and-clobber patterns.  */
1298
          if (!REG_P (reg)
1299
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1300
              || !fixed_regs[REGNO (reg)])
1301
            if (reg_overlap_mentioned_p (reg, src))
1302
              return 0;
1303
        }
1304
 
1305
  /* If INSN contains anything volatile, or is an `asm' (whether volatile
1306
     or not), reject, unless nothing volatile comes between it and I3 */
1307
 
1308
  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
          return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

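  /* An illustrative (hypothetical) case: if INSN contains
     (post_inc (reg 3)), any use of (reg 3) between INSN and I3, or within
     I3, PRED or SUCC themselves, would observe the wrong value once the
     increment is moved into the combined insn.  */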
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (JUMP_P (i3)
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || (pred != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
            || (succ != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

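  /* Concretely: for a sequence
        A: (set (cc0) ...)
        B: insn using (cc0)
     we refuse to combine B unless A is passed as I1 (so it is deleted
     along with the combination) or all the insns involved are adjacent.  */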
  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
         (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
                    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
                  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
         above.  We don't want to prevent pseudos from being combined
         into the address of a MEM, so only prevent the combination if
         i1 or i2 set the same MEM.  */
      if ((inner_dest != dest
           && (!MEM_P (inner_dest)
            || rtx_equal_p (i2dest, inner_dest)
            || (i1dest && rtx_equal_p (i1dest, inner_dest)))
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

          /* This is the same test done in can_combine_p except we can't test
             all_adjacent; we don't have to, since this instruction will stay
             in place, thus we are not considering increasing the lifetime of
             INNER_DEST.

             Also, if this insn sets a function argument, combining it with
             something that might need a spill could clobber a previous
             function argument; the all_adjacent test in can_combine_p also
             checks this; here, we do a more specific test for this case.  */

          || (REG_P (inner_dest)
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn, so
         record that for later.  We have to consider paradoxical
         subregs here, since they kill the whole register, but we
         ignore partial subregs, STRICT_LOW_PART, etc.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (subdest))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
        subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
          && REG_P (subdest)
          && reg_referenced_p (subdest, PATTERN (i3))
          && REGNO (subdest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (subdest) != ARG_POINTER_REGNUM
              || ! fixed_regs [REGNO (subdest)])
#endif
          && REGNO (subdest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = subdest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */
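/* For instance, (plus (mult (reg) (const_int 3)) (reg)) counts, while
   (mult (reg) (const_int 4)) does not, since a multiplication by a power
   of two is really a shift.  */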
 
static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
                && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      if (BINARY_P (x))
        return contains_muldiv (XEXP (x, 0))
            || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
        return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
           && ! fixed_regs[REGNO (src)]
           && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
          || (REGNO (dest) < FIRST_PSEUDO_REGISTER
              && ! fixed_regs[REGNO (dest)]
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}

struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, rtx set, void *data)
{
  struct likely_spilled_retval_info *info = data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
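  /* Build a mask with the low NREGS bits set, then shift it so that bit 0
     corresponds to hard register INFO->regno.  E.g. with info->regno == 10,
     regno == 11 and nregs == 2, new_mask becomes 0x6, covering hard
     registers 11 and 12.  */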
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}

/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */
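/* For instance, a DImode value returned in a pair of word-sized hard
   registers may be copied out by two separate moves; between those moves,
   half of the return value is still live in a register that
   CLASS_LIKELY_SPILLED_P would flag.  */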
 
static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs--;
  do
    {
      if ((mask & 1 << nregs)
          && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
        return 1;
    } while (nregs--);
  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  rtx *loc;

  /* For notes, be conservative and simply remove them.  */
  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
      else
        loc = &XEXP (*loc, 1);
    }

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
}

/* Return TRUE if combine can reuse reg X in mode MODE.
   ADDED_SETS is nonzero if the original set is still required.  */
static bool
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
{
  unsigned int regno;

  if (!REG_P (x))
    return false;

  regno = REGNO (x);
  /* Allow hard registers if the new mode is legal, and occupies no more
     registers than the old mode.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    return (HARD_REGNO_MODE_OK (regno, mode)
            && (hard_regno_nregs[regno][GET_MODE (x)]
                >= hard_regno_nregs[regno][mode]));

  /* Or a pseudo that is only used once.  */
  return (REG_N_SETS (regno) == 1 && !added_sets
          && !REG_USERVAR_P (x));
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i2dest_killed = 0, i1dest_killed = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;

  int maxreg;
  rtx temp;
  rtx link;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || likely_spilled_retval_p (i3)
      /* We also can't do anything if I3 has a
         REG_LIBCALL note since we don't want to disrupt the contiguity of a
         libcall.  */
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
         pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBERs or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

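  /* A hypothetical illustration: with I2
        (parallel [(set (reg 70) (div ...)) (set (reg 71) (mod ...))])
     and I3 (set (mem (reg 100)) (reg 71)), we rewrite the second SET in
     I2 to store the remainder directly into I3's memory destination.  */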
  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
               || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
              && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_cuid = INSN_CUID (i2);

              added_sets_2 = added_sets_1 = 0;
              i2dest = SET_SRC (PATTERN (i3));
              i2dest_killed = dead_or_set_p (i2, i2dest);

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }

  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
     one of those words to another constant, merge them by making a new
     constant.  */
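  /* E.g. on a 32-bit target (a sketch, register numbers hypothetical):
        I2: (set (reg:DI 100) (const_int 0))
        I3: (set (subreg:SI (reg:DI 100) 0) (const_int 5))
     collapse into a single SET of (reg:DI 100) to the merged constant.  */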
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (GET_CODE (SET_SRC (temp)) == CONST_INT
          || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && REG_P (SET_DEST (temp))
      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
      && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
    {
      HOST_WIDE_INT lo, hi;

      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
        lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
      else
        {
          lo = CONST_DOUBLE_LOW (SET_SRC (temp));
          hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
        }

      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
        {
          /* We don't handle the case of the target word being wider
             than a host wide int.  */
          gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD);

          lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1);
          lo |= (INTVAL (SET_SRC (PATTERN (i3)))
                 & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
        }
      else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
        hi = INTVAL (SET_SRC (PATTERN (i3)));
      else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD)
        {
          int sign = -(int) ((unsigned HOST_WIDE_INT) lo
                             >> (HOST_BITS_PER_WIDE_INT - 1));

          lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
                   (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1));
          lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD
                 (INTVAL (SET_SRC (PATTERN (i3)))));
          if (hi == sign)
            hi = lo < 0 ? -1 : 0;
        }
      else
        /* We don't handle the case of the higher word not fitting
           entirely in either hi or lo.  */
        gcc_unreachable ();

      combine_merges++;
      subst_insn = i3;
      subst_low_cuid = INSN_CUID (i2);
      added_sets_2 = added_sets_1 = 0;
      i2dest = SET_DEST (temp);
      i2dest_killed = dead_or_set_p (i2, i2dest);

      SUBST (SET_SRC (temp),
             immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));

      newpat = PATTERN (i2);
      goto validate_replacement;
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same INSN_CUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                             BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
                             XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
                             NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && REG_P (SET_SRC (PATTERN (i3)))
        && MEM_P (SET_DEST (PATTERN (i3)))
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
              || (i1 != 0
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
        {
          undo_all ();
          return 0;
        }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

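  /* In short: added_sets_2 is set when I2DEST neither dies nor is set in
     I3; added_sets_1 is set when I1DEST similarly survives past I3 (or
     past both I2 and I3, when I1 feeds I2 rather than I3 directly).  */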
  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
           ? gen_rtx_SET (VOIDmode, i2dest, i2src)
           : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

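  /* For example (a hypothetical case), if I1 is
     (set (reg 90) (zero_extend (reg 91))) and the relevant bits of
     (reg 91) are already known to be zero, subst can fold the ZERO_EXTEND
     away here before any merging is attempted.  */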
  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.  */
      if (i1)
        {
          subst_low_cuid = INSN_CUID (i1);
          i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
        }
      else
        {
          subst_low_cuid = INSN_CUID (i2);
          i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
        }
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.   Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

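  /* Schematically, with I2 (set (reg 100) (plus A B)) and I3
        (set (reg:CC ...) (compare (reg 100) (const_int 0)))
     we substitute (plus A B) for (reg 100) inside the COMPARE, the shape
     that such arithmetic-and-set-flags patterns expect; the PARALLEL that
     also contains I2's SET is built further below.  */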
  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef SELECT_CC_MODE
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef SELECT_CC_MODE
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          if (can_change_dest_mode (SET_DEST (newpat), added_sets_2,
                                    compare_mode))
            {
              unsigned int regno = REGNO (SET_DEST (newpat));
              rtx new_dest = gen_rtx_REG (compare_mode, regno);

              if (regno >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[regno], new_dest);

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      n_occurrences = 0;         /* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
         above (see detailed comments there) that ensures that I1DEST
         isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
                              0, (rtx*) 0))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
                  sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets)
          = (GET_CODE (PATTERN (i1)) == PARALLEL
             ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);

      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
        RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and isn't
     marked as an instruction that might trap in an EH region.  In that case,
     we just need the first SET.   This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.

     It's pointless doing this if we originally had two sets, one from
     i3, and one from i2.  Combining then splitting the parallel results
     in the original i2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.

     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */
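  /* Typical instance (register numbers hypothetical): a simplified divmod
     leaves
        (parallel [(set (reg 70) (div A B)) (set (reg 71) (mod A B))])
     where (reg 71) carries a REG_UNUSED note; dropping the dead mod SET
     leaves a plain division the target can recognize.  */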
 
  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);
      rtx note;

      if (((REG_P (SET_DEST (set1))
            && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
           || (GET_CODE (SET_DEST (set1)) == SUBREG
               && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
          && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
              || INTVAL (XEXP (note, 0)) <= 0)
          && ! side_effects_p (SET_SRC (set1)))
        {
          newpat = set0;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }

      else if (((REG_P (SET_DEST (set0))
                 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
                || (GET_CODE (SET_DEST (set0)) == SUBREG
                    && find_reg_note (i3, REG_UNUSED,
                                      SUBREG_REG (SET_DEST (set0)))))
               && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
                   || INTVAL (XEXP (note, 0)) <= 0)
               && ! side_effects_p (SET_SRC (set0)))
        {
          newpat = set1;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

          if (insn_code_number >= 0)
            {
              /* If we will be able to accept this, we have made a
                 change to the destination of I3.  This requires us to
                 do a few adjustments.  */

              PATTERN (i3) = newpat;
              adjust_for_new_dest (i3);
            }
        }
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
        {
          enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
          /* If I2DEST is a hard register or the only use of a pseudo,
             we can change its mode.  */
          if (new_mode != GET_MODE (i2dest)
              && new_mode != VOIDmode
              && can_change_dest_mode (i2dest, added_sets_2, new_mode))
            ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
                                   REGNO (i2dest));

          m_split = split_insns (gen_rtx_PARALLEL
                                 (VOIDmode,
                                  gen_rtvec (2, newpat,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              ni2dest))),
                                 i3);
          /* If the split with the mode-changed register didn't work, try
             the original register.  */
          if (! m_split && ni2dest != i2dest)
            {
              ni2dest = i2dest;
              m_split = split_insns (gen_rtx_PARALLEL
                                     (VOIDmode,
                                      gen_rtvec (2, newpat,
                                                 gen_rtx_CLOBBER (VOIDmode,
                                                                  i2dest))),
                                     i3);
            }
        }

      /* If recog_for_combine has discarded clobbers, try to use them
         again for the split.  */
      if (m_split == 0 && newpat_vec_with_clobbers)
        m_split
          = split_insns (gen_rtx_PARALLEL (VOIDmode,
                                           newpat_vec_with_clobbers), i3);

      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
        {
          m_split = PATTERN (m_split);
          insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = m_split;
        }
      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
               && (next_real_insn (i2) == i3
                   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (NEXT_INSN (m_split));
          newi2pat = PATTERN (m_split);

          i3set = single_set (NEXT_INSN (m_split));
          i2set = single_set (m_split);

          /* In case we changed the mode of I2DEST, replace it in the
             pseudo-register table here.  We can't do it above in case this
             code doesn't get executed and we do a split the other way.  */

          if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
            SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (REG_P (new_i3_dest)
                  && REG_P (new_i2_dest)
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                REG_N_SETS (REGNO (new_i2_dest))++;
            }
        }

      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && REG_P (i2dest)
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.
             Make sure we don't change a hard register to have a mode that
             isn't valid for it, or change the number of registers.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || can_change_dest_mode (i2dest, added_sets_2,
                                       GET_MODE (*split)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, INSN_CUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              newdest = gen_rtx_REG (split_mode, REGNO (i2dest));

              if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
            }

          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
          if (split_code == MULT
              && GET_CODE (XEXP (*split, 1)) == CONST_INT
              && INTVAL (XEXP (*split, 1)) > 0
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_ASHIFT (split_mode,
                                             XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }

#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
            {
#ifdef LOAD_EXTEND_OP
              /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
                 what it really is.  */
              if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
                  == SIGN_EXTEND)
                SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
              else
#endif
                SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
            }
#endif

          newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
          SUBST (*split, newdest);
          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                {
                  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                  if (reg_overlap_mentioned_p (reg, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
                }

          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the first assignment is a
     condition code register or cc0.  We eliminate this case by making sure
     the SET_DEST and SET_SRC have the same mode.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */
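  /* Illustration (register numbers hypothetical): an unrecognized
        (parallel [(set (reg 101) (sign_extend (mem Q)))
                   (set (reg 102) (mem Q))])
     becomes the extending load into (reg 101) followed by a copy of its
     low part into (reg 102), so only one memory access remains.  */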
 
2608
  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2609
           && GET_CODE (newpat) == PARALLEL
2610
           && XVECLEN (newpat, 0) == 2
2611
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2612
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2613
           && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
2614
               == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
2615
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2616
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2617
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2618
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2619
                                   INSN_CUID (i2))
2620
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2621
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2622
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2623
                 (REG_P (temp)
2624
                  && reg_stat[REGNO (temp)].nonzero_bits != 0
2625
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2626
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2627
                  && (reg_stat[REGNO (temp)].nonzero_bits
2628
                      != GET_MODE_MASK (word_mode))))
2629
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2630
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2631
                     (REG_P (temp)
2632
                      && reg_stat[REGNO (temp)].nonzero_bits != 0
2633
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2634
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2635
                      && (reg_stat[REGNO (temp)].nonzero_bits
2636
                          != GET_MODE_MASK (word_mode)))))
2637
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2638
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
2639
           && ! find_reg_note (i3, REG_UNUSED,
2640
                               SET_DEST (XVECEXP (newpat, 0, 0))))
2641
    {
2642
      rtx ni2dest;
2643
 
2644
      newi2pat = XVECEXP (newpat, 0, 0);
2645
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
2646
      newpat = XVECEXP (newpat, 0, 1);
2647
      SUBST (SET_SRC (newpat),
2648
             gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
2649
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
2650
 
2651
      if (i2_code_number >= 0)
2652
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
2653
 
2654
      if (insn_code_number >= 0)
2655
        swap_i2i3 = 1;
2656
    }
2657
 
2658
  /* Similarly, check for a case where we have a PARALLEL of two independent
2659
     SETs but we started with three insns.  In this case, we can do the sets
2660
     as two separate insns.  This case occurs when some SET allows two
2661
     other insns to combine, but the destination of that SET is still live.  */
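
  /* Illustrative sketch, not part of the original sources: if substitution
     produced

       (parallel [(set (reg:SI 102) (plus:SI (reg:SI 103) (reg:SI 104)))
                  (set (reg:SI 105) (reg:SI 103))])

     and neither SET references the other's destination (nor cc0), the code
     below simply makes the second SET the new I2 and the first the new I3,
     or vice versa when the first SET references cc0.  */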

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1))
           && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
                 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            {
              if (REG_P (XEXP (note, 0)))
                REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

              remove_note (undobuf.other_insn, note);
            }
        }

      for (note = new_other_notes; note; note = XEXP (note, 1))
        if (REG_P (XEXP (note, 0)))
          REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }
#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
     they are adjacent to each other or not.  */
  {
    rtx p = prev_nonnote_insn (i3);
    if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
        && sets_cc0_p (newi2pat))
      {
        undo_all ();
        return 0;
      }
  }
#endif

  /* Only allow this combination if insn_rtx_costs reports that the
     replacement instructions are cheaper than the originals.  */
  if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat))
    {
      undo_all ();
      return 0;
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  if (swap_i2i3)
    {
      rtx insn;
      rtx link;
      rtx ni2dest;

      /* I3 now uses what used to be its destination and which is now
         I2's destination.  This requires us to do a few adjustments.  */
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);

      /* We need a LOG_LINK from I3 to I2.  But we used to have one,
         so we still will.

         However, some later insn might be using I2's dest and have
         a LOG_LINK pointing at I3.  We must remove this link.
         The simplest way to remove the link is to point it at I1,
         which we know will be a NOTE.  */

      /* newi2pat is usually a SET here; however, recog_for_combine might
         have added some clobbers.  */
      if (GET_CODE (newi2pat) == PARALLEL)
        ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
      else
        ni2dest = SET_DEST (newi2pat);

      for (insn = NEXT_INSN (i3);
           insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
            {
              for (link = LOG_LINKS (insn); link;
                   link = XEXP (link, 1))
                if (XEXP (link, 0) == i3)
                  XEXP (link, 0) = i1;

              break;
            }
        }
    }

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    unsigned int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   || !i2dest_killed
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   || !i1dest_killed
                   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is shared.  */
    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;

    if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
      {
        rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);

        reset_used_flags (call_usage);
        call_usage = copy_rtx (call_usage);

        if (substed_i2)
          replace_rtx (call_usage, i2dest, i2src);

        if (substed_i1)
          replace_rtx (call_usage, i1dest, i1src);

        CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
      }

    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE
              && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      SET_INSN_DELETED (i2);

    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        SET_INSN_DELETED (i1);
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
                   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
        for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
          if (REG_P (XEXP (temp, 0)))
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
        for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
          if (REG_P (XEXP (temp, 0)))
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (REG_P (i3dest_killed))
          REG_N_DEATHS (REGNO (i3dest_killed))++;

        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }

    if (i2dest_in_i2src)
      {
        if (REG_P (i2dest))
          REG_N_DEATHS (REGNO (i2dest))++;

        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        if (REG_P (i1dest))
          REG_N_DEATHS (REGNO (i1dest))++;

        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (REG_P (i2dest))
      {
        rtx link;
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST. If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            REG_N_SETS (regno)--;
          }
      }

    if (i1 && REG_P (i1dest))
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          REG_N_SETS (regno)--;
      }

    /* Update reg_stat[].nonzero_bits et al for any changes that may have
       been made to this insn.  The order of the calls to
       set_nonzero_bits_and_sign_copies() is important, because newi2pat
       can affect the nonzero_bits of newpat.  */
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);

    /* Set new_direct_jump_p if a new return or simple jump instruction
       has been created.

       If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if (returnjump_p (i3) || any_uncondjump_p (i3))
      {
        *new_direct_jump_p = 1;
        mark_jump_label (PATTERN (i3), i3, 0);

        if ((temp = next_nonnote_insn (i3)) == NULL_RTX
            || !BARRIER_P (temp))
          emit_barrier_after (i3);
      }

    if (undobuf.other_insn != NULL_RTX
        && (returnjump_p (undobuf.other_insn)
            || any_uncondjump_p (undobuf.other_insn)))
      {
        *new_direct_jump_p = 1;

        if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
            || !BARRIER_P (temp))
          emit_barrier_after (undobuf.other_insn);
      }

    /* A NOOP jump does not need a barrier, but it does need the CFG
       cleaned up.  */
    if (GET_CODE (newpat) == SET
        && SET_SRC (newpat) == pc_rtx
        && SET_DEST (newpat) == pc_rtx)
      *new_direct_jump_p = 1;
  }

  combine_successes++;
  undo_commit ();

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}

/* Undo all the modifications recorded in undobuf.  */

static void
undo_all (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      if (undo->is_int)
        *undo->where.i = undo->old_contents.i;
      else
        *undo->where.r = undo->old_contents.r;

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  undobuf.undos = 0;
}

/* We've committed to accepting the changes we made.  Move all
   of the undos to the free list.  */

static void
undo_commit (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }
  undobuf.undos = 0;
}


/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */
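
/* Illustrative sketch, not part of the original sources: for an insn such
   as

     (set (reg:SI 100) (plus:SI (mult:SI (reg:SI 101) (reg:SI 102))
                                (reg:SI 103)))

   on a machine with no multiply-add pattern, the MULT subexpression is a
   natural split point: try_combine can compute it into a new I2 and leave
   just the PLUS in I3.  */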

static rtx *
find_split_point (rtx *loc, rtx insn)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  int unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (MEM_P (SUBREG_REG (x)))
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          SUBST (XEXP (x, 0),
                 gen_rtx_LO_SUM (Pmode,
                                 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
                                 XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif

      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
                                 subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq
              && NEXT_INSN (seq) != NULL_RTX
              && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
              && NONJUMP_INSN_P (seq)
              && GET_CODE (PATTERN (seq)) == SET
              && SET_DEST (PATTERN (seq)) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (seq)))
              && NONJUMP_INSN_P (NEXT_INSN (seq))
              && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
              && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
              && memory_address_p (GET_MODE (x),
                                   SET_SRC (PATTERN (NEXT_INSN (seq)))))
            {
              rtx src1 = SET_SRC (PATTERN (seq));
              rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
            return &XEXP (XEXP (x, 0), 0);
        }
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && !OBJECT_P (SET_SRC (x))
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
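      /* Illustrative sketch, not part of the original sources: storing the
         constant C into a LEN-bit field of DEST at position POS becomes

           (set (reg) (ior (and (reg) ~(MASK << POS)) (C << POS)))

         where MASK is (1 << LEN) - 1; the AND is dropped when C == MASK,
         since the IOR then sets every bit of the field anyway.  */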
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
          && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
          && GET_CODE (SET_SRC (x)) == CONST_INT
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
          rtx or_mask;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          or_mask = gen_int_mode (src << pos, mode);
          if (src == mask)
            SUBST (SET_SRC (x),
                   simplify_gen_binary (IOR, mode, dest, or_mask));
          else
            {
              rtx negmask = gen_int_mode (~(mask << pos), mode);
              SUBST (SET_SRC (x),
                     simplify_gen_binary (IOR, mode,
                                          simplify_gen_binary (AND, mode,
                                                               dest, negmask),
                                          or_mask));
            }

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or nonzero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && REG_P (XEXP (SET_SRC (x), 0))
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && REG_P (SET_DEST (x))
              && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
          break;

        case NE:
          /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one
             bit of X is known to be on, this can be converted into a NEG
             of a shift.  */
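          /* Illustrative sketch, not part of the original sources: if only
             bit 3 of X can be nonzero, then

               (ne:SI (reg:SI 100) (const_int 0))

             becomes (neg:SI (lshiftrt:SI (reg:SI 100) (const_int 3))),
             which yields 0 or -1, as STORE_FLAG_VALUE == -1 requires.  */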
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_NEG (mode,
                                  gen_rtx_LSHIFTRT (mode,
                                                    XEXP (SET_SRC (x), 0),
                                                    GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          break;

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */
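
          /* Illustrative sketch, not part of the original sources: an
             unsigned 4-bit extraction at bit 8 of a 32-bit value can be
             done either as

               (and (lshiftrt X (const_int 8)) (const_int 15))

             or, for wider fields whose AND mask might not fit in an
             immediate, as

               (lshiftrt (ashift X (const_int 20)) (const_int 28))

             with ashiftrt instead of lshiftrt in the signed case.  */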

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_AND (mode,
                                  gen_rtx_LSHIFTRT
                                  (mode, gen_lowpart (mode, inner),
                                   GEN_INT (pos)),
                                  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_fmt_ee
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_ASHIFT (mode,
                                      gen_lowpart (mode, inner),
                                      GEN_INT (GET_MODE_BITSIZE (mode)
                                               - len - pos)),
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }

      /* See if this is a simple operation with a constant as the second
         operand.  It might be that this constant is out of range and hence
         could be used as a split point.  */
      if (BINARY_P (SET_SRC (x))
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
          && (OBJECT_P (XEXP (SET_SRC (x), 0))
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
        return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
         not in a register.  The operation might require this operand in a
         register, so return it as a split point.  We can always do this
         because if the first operand were another operation, we would have
         already found it as a split point.  */
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
        return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
         it is better to write this as (not (ior A B)) so we can split it.
         Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
        {
          SUBST (*loc,
                 gen_rtx_NOT (GET_MODE (x),
                              gen_rtx_fmt_ee (code == IOR ? AND : IOR,
                                              GET_MODE (x),
                                              XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 1), 0))));
          return find_split_point (loc, insn);
        }

      /* Many RISC machines have a large set of logical insns.  If the
         second operand is a NOT, put it first so we will try to split the
         other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
        {
          rtx tem = XEXP (x, 0);
          SUBST (XEXP (x, 0), XEXP (x, 1));
          SUBST (XEXP (x, 1), tem);
        }
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_BITFIELD_OPS:              /* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case RTX_TERNARY:
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_UNARY:
      /* Some machines have (and (shift ...) ...) insns.  If X is not
         an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
        return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
        return split;
      return loc;

    default:
      /* Otherwise, we don't have a split point.  */
      return 0;
    }
}

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is nonzero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is nonzero.  */

static rtx
subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  const char *fmt;
  int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)                        \
  ((X) == (Y)                                           \
   || (REG_P (X) && REG_P (Y)   \
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && REG_P (from)
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && OBJECT_P (x))
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
          && XEXP (new, 0) == const0_rtx)
        return new;

      SUBST (XVECEXP (x, 0, 0), new);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
        {
          rtx dest = SET_DEST (XVECEXP (x, 0, i));

          if (!REG_P (dest)
              && GET_CODE (dest) != CC0
              && GET_CODE (dest) != PC)
            {
              new = subst (dest, from, to, 0, unique_copy);

              /* If this substitution failed, this whole thing fails.  */
              if (GET_CODE (new) == CLOBBER
                  && XEXP (new, 0) == const0_rtx)
                return new;

              SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
            }
        }
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
         or PC, so set up to skip this common case.  All other cases
         where we want to suppress replacing something inside a
         SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
          && (REG_P (SET_DEST (x))
              || GET_CODE (SET_DEST (x)) == CC0
              || GET_CODE (SET_DEST (x)) == PC))
        fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
         constant.  */
      if (fmt[0] == 'e')
        op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
        {
          if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                {
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
                    {
                      new = (unique_copy && n_occurrences
                             ? copy_rtx (to) : to);
                      n_occurrences++;
                    }
                  else
                    {
                      new = subst (XVECEXP (x, i, j), from, to, 0,
                                   unique_copy);

                      /* If this substitution failed, this whole thing
                         fails.  */
                      if (GET_CODE (new) == CLOBBER
                          && XEXP (new, 0) == const0_rtx)
                        return new;
                    }

                  SUBST (XVECEXP (x, i, j), new);
                }
            }
          else if (fmt[i] == 'e')
            {
              /* If this is a register being set, ignore it.  */
              new = XEXP (x, i);
              if (in_dest
                  && i == 0
                  && (((code == SUBREG || code == ZERO_EXTRACT)
                       && REG_P (new))
                      || code == STRICT_LOW_PART))
                ;

              else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
                {
                  /* In general, don't install a subreg involving two
                     modes not tieable.  It can worsen register
                     allocation, and can even make invalid reload
                     insns, since the reg inside may need to be copied
                     from in the outside mode, and that may be invalid
                     if it is an fp reg copied in integer mode.

                     We allow two exceptions to this: It is valid if
                     it is inside another SUBREG and the mode of that
                     SUBREG and the mode of the inside of TO is
                     tieable and it is valid if X is a SET that copies
                     FROM to CC0.  */

                  if (GET_CODE (to) == SUBREG
                      && ! MODES_TIEABLE_P (GET_MODE (to),
                                            GET_MODE (SUBREG_REG (to)))
                      && ! (code == SUBREG
                            && MODES_TIEABLE_P (GET_MODE (x),
                                                GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
                      )
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CANNOT_CHANGE_MODE_CLASS
                  if (code == SUBREG
                      && REG_P (to)
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
                      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
                                                   GET_MODE (to),
                                                   GET_MODE (x)))
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

                  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
                  n_occurrences++;
                }
              else
                /* If we are in a SET_DEST, suppress most cases unless we
                   have gone inside a MEM, in which case we want to
                   simplify the address.  We assume here that things that
                   are actually part of the destination have their inner
                   parts in the first expression.  This is true for SUBREG,
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
                   things aside from REG and MEM that should appear in a
                   SET_DEST.  */
                new = subst (XEXP (x, i), from, to,
                             (((in_dest
                                && (code == SUBREG || code == STRICT_LOW_PART
                                    || code == ZERO_EXTRACT))
                               || code == SET)
                              && i == 0), unique_copy);

              /* If we found that we will have to reject this combination,
                 indicate that by returning the CLOBBER ourselves, rather than
                 an expression containing it.  This will speed things up as
                 well as prevent accidents where two CLOBBERs are considered
                 to be equal, thus producing an incorrect simplification.  */

              if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
                return new;

              if (GET_CODE (x) == SUBREG
                  && (GET_CODE (new) == CONST_INT
                      || GET_CODE (new) == CONST_DOUBLE))
                {
                  enum machine_mode mode = GET_MODE (x);

                  x = simplify_subreg (GET_MODE (x), new,
                                       GET_MODE (SUBREG_REG (x)),
                                       SUBREG_BYTE (x));
                  if (! x)
                    x = gen_rtx_CLOBBER (mode, const0_rtx);
                }
              else if (GET_CODE (new) == CONST_INT
                       && GET_CODE (x) == ZERO_EXTEND)
                {
                  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                                new, GET_MODE (XEXP (x, 0)));
                  gcc_assert (x);
                }
              else
                SUBST (XEXP (x, i), new);
            }
        }
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
         with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
        x = combine_simplify_rtx (x, op0_mode, in_dest);

      if (GET_CODE (x) == code)
        break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
         have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
   if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  rtx reversed;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */
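
  /* Illustrative sketch, not part of the original sources: for

       (plus:SI (if_then_else:SI (eq (reg:SI 100) (const_int 0))
                                 (reg:SI 101) (reg:SI 102))
                (const_int 1))

     if_then_else_cond extracts the EQ condition with arms
     (plus (reg:SI 101) (const_int 1)) and (plus (reg:SI 102) (const_int 1));
     the code below then simplifies each arm, often collapsing the result
     to a store-flag expression.  */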

  if ((BINARY_P (x)
       && ((!OBJECT_P (XEXP (x, 0))
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
           || (!OBJECT_P (XEXP (x, 1))
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
      || (UNARY_P (x)
          && (!OBJECT_P (XEXP (x, 0))
               && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
          /* If everything is a comparison, what we have is highly unlikely
             to be simpler, so don't use it.  */
          && ! (COMPARISON_P (x)
                && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
        {
          rtx cop1 = const0_rtx;
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

          if (cond_code == NE && COMPARISON_P (cond))
            return x;

          /* Simplify the alternative arms; this may collapse the true and
             false arms to store-flag values.  Be careful to use copy_rtx
             here since true_rtx or false_rtx might share RTL with x as a
             result of the if_then_else_cond call above.  */
          true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
          false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);

          /* If true_rtx and false_rtx are not general_operands, an if_then_else
             is unlikely to be simpler.  */
          if (general_operand (true_rtx, VOIDmode)
              && general_operand (false_rtx, VOIDmode))
            {
              enum rtx_code reversed;

              /* Restarting if we generate a store-flag expression will cause
                 us to loop.  Just drop through in this case.  */

              /* If the result values are STORE_FLAG_VALUE and zero, we can
                 just make the comparison operation.  */
              if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
                x = simplify_gen_relational (cond_code, mode, VOIDmode,
                                             cond, cop1);
              else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_relational (reversed, mode, VOIDmode,
                                             cond, cop1);

              /* Likewise, we can make the negate of a comparison operation
                 if the result values are - STORE_FLAG_VALUE and zero.  */
              else if (GET_CODE (true_rtx) == CONST_INT
                       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
                       && false_rtx == const0_rtx)
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (cond_code,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else if (GET_CODE (false_rtx) == CONST_INT
                       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
                       && true_rtx == const0_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (reversed,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else
                return gen_rtx_IF_THEN_ELSE (mode,
                                             simplify_gen_relational (cond_code,
                                                                      mode,
                                                                      VOIDmode,
                                                                      cond,
4001
                                                                      cop1),
4002
                                             true_rtx, false_rtx);
4003
 
4004
              code = GET_CODE (x);
4005
              op0_mode = VOIDmode;
4006
            }
4007
        }
4008
    }
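
  /* As an illustration of the transformation above, if_then_else_cond can
     view (plus (if_then_else C B1 B2) D) as the condition C with arms
     (plus B1 D) and (plus B2 D); when both arms collapse to store-flag
     values, the IF_THEN_ELSE disappears entirely.  */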

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
        if (cmp_mode == VOIDmode)
          {
            cmp_mode = GET_MODE (XEXP (x, 1));
            if (cmp_mode == VOIDmode)
              cmp_mode = op0_mode;
          }
        temp = simplify_relational_operation (code, mode, cmp_mode,
                                              XEXP (x, 0), XEXP (x, 1));
      }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case RTX_BITFIELD_OPS:
    case RTX_TERNARY:
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
                                         XEXP (x, 1), XEXP (x, 2));
      break;
    default:
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }
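
  /* The inverse distributive law factors a common operand back out;
     e.g. (ior (and A B) (and A C)) can become (and A (ior B C)).  */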

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (e.g. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
          || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
        {
          rtx other = XEXP (XEXP (x, 0), 0);
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
          rtx inner_op1 = XEXP (x, 1);
          rtx inner;

          /* Make sure we pass the constant operand if any as the second
             one if this is a commutative operation.  */
          if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
            {
              rtx tem = inner_op0;
              inner_op0 = inner_op1;
              inner_op1 = tem;
            }
          inner = simplify_binary_operation (code == MINUS ? PLUS
                                             : code == DIV ? MULT
                                             : code,
                                             mode, inner_op0, inner_op1);

          /* For commutative operations, try the other pair if that one
             didn't simplify.  */
          if (inner == 0 && COMMUTATIVE_ARITH_P (x))
            {
              other = XEXP (XEXP (x, 0), 1);
              inner = simplify_binary_operation (code, mode,
                                                 XEXP (XEXP (x, 0), 0),
                                                 XEXP (x, 1));
            }

          if (inner)
            return simplify_gen_binary (code, mode, other, inner);
        }
    }
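
  /* Reassociation example: in (plus (plus X (const_int 2)) (const_int 3))
     the two constants combine, and the whole expression becomes
     (plus X (const_int 5)).  */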

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
         address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (SUBREG_REG (x));

      /* See if this can be moved to simplify_subreg.  */
      if (CONSTANT_P (SUBREG_REG (x))
          && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
             /* Don't call gen_lowpart if the inner mode
                is VOIDmode and we cannot simplify it, as SUBREG without
                inner mode is invalid.  */
          && (GET_MODE (SUBREG_REG (x)) != VOIDmode
              || gen_lowpart_common (mode, SUBREG_REG (x))))
        return gen_lowpart (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;
      {
        rtx temp;
        temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
                                SUBREG_BYTE (x));
        if (temp)
          return temp;
      }

      /* Don't change the mode of the MEM if that would change the meaning
         of the address.  */
      if (MEM_P (SUBREG_REG (x))
          && (MEM_VOLATILE_P (SUBREG_REG (x))
              || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
        return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
         we might have been counting on using the fact that some bits were
         zero.  We now do this in the SET.  */

      break;

    case NOT:
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && subreg_lowpart_p (XEXP (x, 0))
          && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
          && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
        {
          enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));

          x = gen_rtx_ROTATE (inner_mode,
                              simplify_gen_unary (NOT, inner_mode, const1_rtx,
                                                  inner_mode),
                              XEXP (SUBREG_REG (XEXP (x, 0)), 1));
          return gen_lowpart (mode, x);
        }

      /* Apply De Morgan's laws to reduce number of patterns for machines
         with negating logical insns (and-not, nand, etc.).  If result has
         only one NOT, put it first, since that is how the patterns are
         coded.  */

      if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
        {
          rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
          enum machine_mode op_mode;

          op_mode = GET_MODE (in1);
          in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode);

          op_mode = GET_MODE (in2);
          if (op_mode == VOIDmode)
            op_mode = mode;
          in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode);

          if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT)
            {
              rtx tem = in2;
              in2 = in1; in1 = tem;
            }

          return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
                                 mode, in1, in2);
        }
      break;
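
      /* De Morgan in action: (not (ior A B)) becomes (and (not A) (not B))
         and (not (and A B)) becomes (ior (not A) (not B)); if only one
         operand keeps its NOT, it ends up as the first operand.  */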

    case NEG:
      /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && XEXP (XEXP (x, 0), 1) == const1_rtx
          && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
        return simplify_gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
                                    constm1_rtx);

      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
         replaced by (lshiftrt X C).  This will convert
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
          && GET_CODE (XEXP (temp, 1)) == CONST_INT
          && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
                                     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
         or a SUBREG of one since we'd be making the expression more
         complex if it was just a register.  */

      if (!REG_P (temp)
          && ! (GET_CODE (temp) == SUBREG
                && REG_P (SUBREG_REG (temp)))
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
        {
          rtx temp1 = simplify_shift_const
            (NULL_RTX, ASHIFTRT, mode,
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
                                   GET_MODE_BITSIZE (mode) - 1 - i),
             GET_MODE_BITSIZE (mode) - 1 - i);

          /* If all we did was surround TEMP with the two shifts, we
             haven't improved anything, so don't use it.  Otherwise,
             we are better off with TEMP1.  */
          if (GET_CODE (temp1) != ASHIFTRT
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
              || XEXP (XEXP (temp1, 0), 0) != temp)
            return temp1;
        }
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
        break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                              GET_MODE_MASK (mode), NULL_RTX, 0));

      /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI.  */
      if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
           || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
        return XEXP (XEXP (x, 0), 0);

      /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
         (OP:SI foo:SI) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
           || GET_CODE (XEXP (x, 0)) == NEG)
          && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
              || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
          && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
        return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
                                   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);

      /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
         (truncate:SI X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
          && subreg_lowpart_p (XEXP (x, 0)))
        return SUBREG_REG (XEXP (x, 0));

      /* If we know that the value is already truncated, we can
         replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
         is nonzero for the corresponding modes.  But don't do this
         for an (LSHIFTRT (MULT ...)) since this will cause problems
         with the umulXi3_highpart patterns.  */
      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
          && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
             >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1)
          && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
                && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
        return gen_lowpart (mode, XEXP (x, 0));

      /* A truncate of a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  This is like the previous test,
         but it works even if the comparison is done in a mode larger
         than HOST_BITS_PER_WIDE_INT.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && COMPARISON_P (XEXP (x, 0))
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0)
        return gen_lowpart (mode, XEXP (x, 0));

      /* Similarly, a truncate of a register whose value is a
         comparison can be replaced with a subreg if STORE_FLAG_VALUE
         permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
          && (temp = get_last_value (XEXP (x, 0)))
          && COMPARISON_P (temp))
        return gen_lowpart (mode, XEXP (x, 0));

      break;

    case FLOAT_TRUNCATE:
      /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF.  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
        return XEXP (XEXP (x, 0), 0);

      /* (float_truncate:SF (float_truncate:DF foo:XF))
         = (float_truncate:SF foo:XF).
         This may eliminate double rounding, so it is unsafe.

         (float_truncate:SF (float_extend:XF foo:DF))
         = (float_truncate:SF foo:DF).

         (float_truncate:DF (float_extend:XF foo:SF))
         = (float_extend:DF foo:SF).  */
      if ((GET_CODE (XEXP (x, 0)) == FLOAT_TRUNCATE
           && flag_unsafe_math_optimizations)
          || GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND)
        return simplify_gen_unary (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0),
                                                            0)))
                                   > GET_MODE_SIZE (mode)
                                   ? FLOAT_TRUNCATE : FLOAT_EXTEND,
                                   mode,
                                   XEXP (XEXP (x, 0), 0), mode);

      /* (float_truncate (float x)) is (float x).  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT
          && (flag_unsafe_math_optimizations
              || ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
                      - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
                                             GET_MODE (XEXP (XEXP (x, 0), 0)))))))
        return simplify_gen_unary (FLOAT, mode,
                                   XEXP (XEXP (x, 0), 0),
                                   GET_MODE (XEXP (XEXP (x, 0), 0)));

      /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
         (OP:SF foo:SF) if OP is NEG or ABS.  */
      if ((GET_CODE (XEXP (x, 0)) == ABS
           || GET_CODE (XEXP (x, 0)) == NEG)
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
          && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
        return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
                                   XEXP (XEXP (XEXP (x, 0), 0), 0), mode);

      /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
         is (float_truncate:SF X).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
        return SUBREG_REG (XEXP (x, 0));
      break;
    case FLOAT_EXTEND:
      /* (float_extend (float_extend x)) is (float_extend x).

         (float_extend (float x)) is (float x) assuming that double
         rounding can't happen.  */
      if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
          || (GET_CODE (XEXP (x, 0)) == FLOAT
              && ((unsigned)significand_size (GET_MODE (XEXP (x, 0)))
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
                      - num_sign_bit_copies (XEXP (XEXP (x, 0), 0),
                                             GET_MODE (XEXP (XEXP (x, 0), 0)))))))
        return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode,
                                   XEXP (XEXP (x, 0), 0),
                                   GET_MODE (XEXP (XEXP (x, 0), 0)));

      break;
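
      /* For instance, (float_extend:DF (float:SF (reg:HI))) becomes
         (float:DF (reg:HI)): SFmode's 24-bit significand represents any
         HImode integer exactly, so no double rounding can occur.  */
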
#ifdef HAVE_cc0
    case COMPARE:
      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
         using cc0, in which case we want to leave it as a COMPARE
         so we can distinguish it from a register-register-copy.  */
      if (XEXP (x, 1) == const0_rtx)
        return XEXP (x, 0);

      /* x - 0 is the same as x unless x's mode has signed zeros and
         allows rounding towards -infinity.  Under those conditions,
         0 - 0 is -0.  */
      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
            && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
          && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
        return XEXP (x, 0);
      break;
#endif

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
         returning the inner CONST since CONST can be shared with a
         REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
         can add in an offset.  find_split_point will split this address up
         again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
        return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* Canonicalize (plus (mult (neg B) C) A) to (minus A (mult B C)).  */
      if (GET_CODE (XEXP (x, 0)) == MULT
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG)
        {
          rtx in1, in2;

          in1 = XEXP (XEXP (XEXP (x, 0), 0), 0);
          in2 = XEXP (XEXP (x, 0), 1);
          return simplify_gen_binary (MINUS, mode, XEXP (x, 1),
                                      simplify_gen_binary (MULT, mode,
                                                           in1, in2));
        }

      /* If we have (plus (plus A const) B), associate it so that CONST is
         outermost.  That's because that's the way indexed addresses are
         supposed to appear.  This code used to check many more cases, but
         they are now checked elsewhere.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
        return simplify_gen_binary (PLUS, mode,
                                    simplify_gen_binary (PLUS, mode,
                                                         XEXP (XEXP (x, 0), 0),
                                                         XEXP (x, 1)),
                                    XEXP (XEXP (x, 0), 1));

      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
         bit-field and can be replaced by either a sign_extend or a
         sign_extract.  The `and' may be a zero_extend and the two
         <c>, -<c> constants may be reversed.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
          && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
              || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
               && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
               && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
                   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
                  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
                      == (unsigned int) i + 1))))
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
                                 GET_MODE_BITSIZE (mode) - (i + 1)),
           GET_MODE_BITSIZE (mode) - (i + 1));
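
      /* Worked instance with i == 2 in QImode: (plus (xor (and X 7) 4) -4)
         computes ((X & 7) ^ 4) - 4, the sign extension of a 3-bit field,
         and is rewritten as (ashiftrt (ashift X 5) 5).  */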

      /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
         C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
         is 1.  This produces better code than the alternative immediately
         below.  */
      if (COMPARISON_P (XEXP (x, 0))
          && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
              || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx))
          && (reversed = reversed_comparison (XEXP (x, 0), mode)))
        return
          simplify_gen_unary (NEG, mode, reversed, mode);

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
         the bitsize of the mode - 1.  This allows simplification of
         "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
          && !REG_P (XEXP (x, 0))
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                && REG_P (SUBREG_REG (XEXP (x, 0))))
          && nonzero_bits (XEXP (x, 0), mode) == 1)
        return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
                                 GET_MODE_BITSIZE (mode) - 1),
           GET_MODE_BITSIZE (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
         the addition into an IOR.  This will often be further simplified,
         for example in cases like ((a & 1) + (a & 2)), which can
         become a & 3.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode)
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
        {
          /* Try to simplify the expression further.  */
          rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
          temp = combine_simplify_rtx (tor, mode, in_dest);

          /* If we could, great.  If not, do not go ahead with the IOR
             replacement, since PLUS appears in many special purpose
             address arithmetic instructions.  */
          if (GET_CODE (temp) != CLOBBER && temp != tor)
            return temp;
        }
      break;

    case MINUS:
      /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
         by reversing the comparison code if valid.  */
      if (STORE_FLAG_VALUE == 1
          && XEXP (x, 0) == const1_rtx
          && COMPARISON_P (XEXP (x, 1))
          && (reversed = reversed_comparison (XEXP (x, 1), mode)))
        return reversed;

      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
         (and <foo> (const_int pow2-1))  */
      if (GET_CODE (XEXP (x, 1)) == AND
          && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
          && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
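
      /* Concretely, (minus X (and X (const_int -8))) is X - (X & ~7),
         i.e. the low-order bits of X, so it becomes
         (and X (const_int 7)).  */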

      /* Canonicalize (minus A (mult (neg B) C)) to (plus (mult B C) A).  */
      if (GET_CODE (XEXP (x, 1)) == MULT
          && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG)
        {
          rtx in1, in2;

          in1 = XEXP (XEXP (XEXP (x, 1), 0), 0);
          in2 = XEXP (XEXP (x, 1), 1);
          return simplify_gen_binary (PLUS, mode,
                                      simplify_gen_binary (MULT, mode,
                                                           in1, in2),
                                      XEXP (x, 0));
        }

      /* Canonicalize (minus (neg A) (mult B C)) to
         (minus (mult (neg B) C) A).  */
      if (GET_CODE (XEXP (x, 1)) == MULT
          && GET_CODE (XEXP (x, 0)) == NEG)
        {
          rtx in1, in2;

          in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode);
          in2 = XEXP (XEXP (x, 1), 1);
          return simplify_gen_binary (MINUS, mode,
                                      simplify_gen_binary (MULT, mode,
                                                           in1, in2),
                                      XEXP (XEXP (x, 0), 0));
        }

      /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
         integers.  */
      if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
        return simplify_gen_binary (MINUS, mode,
                                    simplify_gen_binary (MINUS, mode,
                                                         XEXP (x, 0),
                                                         XEXP (XEXP (x, 1), 0)),
                                    XEXP (XEXP (x, 1), 1));
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  This
         occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      /* Try to simplify a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
          && GET_CODE (XEXP (x, 0)) == DIV)
        {
          rtx tem = simplify_binary_operation (MULT, mode,
                                               XEXP (XEXP (x, 0), 0),
                                               XEXP (x, 1));
          if (tem)
            return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
        }
      break;
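
      /* E.g. (mult (plus A B) (const_int 8)) is tried as
         (plus (mult A 8) (mult B 8)) and kept only when the refactored
         form turns out to be cheaper.  */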

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
         its first operand is a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
              || GET_CODE (XEXP (x, 0)) == ROTATE
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
      break;
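
      /* For instance, (udiv (lshiftrt X (const_int 2)) (const_int 16))
         is handed to simplify_shift_const as a right shift by 4, which
         merges with the inner shift to give (lshiftrt X (const_int 6)).  */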

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
          || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
              && ! CC0_P (XEXP (x, 0))))
        {
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          enum rtx_code new_code;

          if (GET_CODE (op0) == COMPARE)
            op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

          /* Simplify our comparison, if possible.  */
          new_code = simplify_comparison (code, &op0, &op1);

          /* If STORE_FLAG_VALUE is 1, we can convert (ne X 0) to simply X
             if only the low-order bit is possibly nonzero in X (such as when
             X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
             (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
             known to be either 0 or -1, NE becomes a NEG and EQ becomes
             (plus X 1).

             Remove any ZERO_EXTRACT we made when thinking this was a
             comparison.  It may now be simpler to use, e.g., an AND.  If a
             ZERO_EXTRACT is indeed appropriate, it will be placed back by
             the call to make_compound_operation in the SET case.  */

          if (STORE_FLAG_VALUE == 1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && nonzero_bits (op0, mode) == 1)
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == 1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_binary (XOR, mode,
                                          gen_lowpart (mode, op0),
                                          const1_rtx);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), 1);
            }

          /* If STORE_FLAG_VALUE is -1, we have cases similar to
             those above.  */
          if (STORE_FLAG_VALUE == -1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && (num_sign_bit_copies (op0, mode)
                  == GET_MODE_BITSIZE (mode)))
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == -1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NOT, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          /* If X is 0/1, (eq X 0) is X-1.  */
          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), -1);
            }

          /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
             one bit that might be nonzero, we can convert (ne X 0) to
             (ashift X C) where C puts the bit in the sign bit.  Remove any
             AND with STORE_FLAG_VALUE when we are done, since we are only
             going to test the sign bit.  */
          if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
            {
              x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                        expand_compound_operation (op0),
                                        GET_MODE_BITSIZE (mode) - 1 - i);
              if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
                return XEXP (x, 0);
              else
                return x;
            }

          /* If the code changed, return a whole new comparison.  */
          if (new_code != code)
            return gen_rtx_fmt_ee (new_code, mode, op0, op1);

          /* Otherwise, keep this operation, but maybe change its operands.
             This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
          SUBST (XEXP (x, 0), op0);
          SUBST (XEXP (x, 1), op1);
        }
      break;
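
      /* Example for STORE_FLAG_VALUE == 1: (ne (zero_extract X 1 7) 0)
         reduces to the value of the extracted bit itself, expanded by
         expand_compound_operation into shift-and-mask form.  */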

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
        return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
    case XOR:
      return simplify_logical (x);

    case ABS:
      /* (abs (neg <foo>)) -> (abs <foo>) */
      if (GET_CODE (XEXP (x, 0)) == NEG)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));

      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
         do nothing.  */
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        break;

      /* If operand is something known to be positive, ignore the ABS.  */
      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
          || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
               <= HOST_BITS_PER_WIDE_INT)
              && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                   & ((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
                  == 0)))
        return XEXP (x, 0);

      /* If operand is known to be only -1 or 0, convert ABS to NEG.  */
      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
        return gen_rtx_NEG (mode, XEXP (x, 0));

      break;

    case FFS:
      /* (ffs (*_extend <X>)) = (ffs <X>) */
      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
          || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case POPCOUNT:
    case PARITY:
      /* (pop* (zero_extend <X>)) = (pop* <X>) */
      if (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case FLOAT:
      /* (float (sign_extend <X>)) = (float <X>).  */
      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)));

      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
        SUBST (XEXP (x, 1),
               force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
                              ((HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
                              NULL_RTX, 0));
      break;

    case VEC_SELECT:
      {
        rtx op0 = XEXP (x, 0);
        rtx op1 = XEXP (x, 1);
        int len;

        gcc_assert (GET_CODE (op1) == PARALLEL);
        len = XVECLEN (op1, 0);
        if (len == 1
            && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT
            && GET_CODE (op0) == VEC_CONCAT)
          {
            int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x));

            /* Try to find the element in the VEC_CONCAT.  */
            for (;;)
              {
                if (GET_MODE (op0) == GET_MODE (x))
                  return op0;
                if (GET_CODE (op0) == VEC_CONCAT)
                  {
                    HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)));
                    if (offset < op0_size)
                      op0 = XEXP (op0, 0);
                    else
                      {
                        offset -= op0_size;
                        op0 = XEXP (op0, 1);
                      }
                  }
                else
                  break;
              }
          }
      }

      break;

    default:
      break;
    }

  return x;
}

/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = COMPARISON_P (cond);
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;

  /* Simplify storing of the truth value.  */
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return simplify_gen_relational (true_code, mode, VOIDmode,
                                    XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode)))
    return reversed;

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = reversed_comparison_code (cond, NULL))
          != UNKNOWN)
      && REG_P (XEXP (cond, 0)))
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
        {
          swapped = 1, true_code = EQ, false_code = NE;
          temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
        }

      /* If we are comparing against zero and the expression being tested has
         only a single bit that might be nonzero, that is its value when it is
         not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
        false_code = EQ, false_val = GEN_INT (nzb);
      else if (true_code == EQ && true_val == const0_rtx
               && (num_sign_bit_copies (from, GET_MODE (from))
                   == GET_MODE_BITSIZE (GET_MODE (from))))
        false_code = EQ, false_val = constm1_rtx;

      /* Now simplify an arm if we know the value of the register in the
         branch and it is used in the arm.  Be careful due to the potential
         of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
        true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
                                      from, true_val),
                      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
        false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
                                   from, false_val),
                       pc_rtx, pc_rtx, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && reversed_comparison_code (cond, NULL) != UNKNOWN
      && (true_rtx == pc_rtx
          || (CONSTANT_P (true_rtx)
              && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
          || true_rtx == const0_rtx
          || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
          || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
              && !OBJECT_P (false_rtx))
          || reg_mentioned_p (true_rtx, false_rtx)
          || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = COMPARISON_P (cond);
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && !HONOR_NANS (mode)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
           && !HONOR_NANS (mode)
           && rtx_equal_p (XEXP (cond, 0), true_rtx)
           && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;

  /* Look for cases where we have (abs X) or (neg (abs X)).  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && comparison_p
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
        return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
        return
          simplify_gen_unary (NEG, mode,
                              simplify_gen_unary (ABS, mode, true_rtx, mode),
                              mode);
      default:
        break;
      }

  /* Look for MIN or MAX.  */

  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
        return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
      case LE:
      case LT:
        return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
      case GEU:
      case GTU:
        return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
      case LEU:
      case LTU:
        return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
      default:
        break;
      }
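
  /* E.g. (if_then_else (gt A B) A B) becomes (smax A B); the unsigned
     comparisons map to UMAX and UMIN the same way.  */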
5087
 
5088
  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5089
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
5090
     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5091
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5092
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
5093
     neither 1 or -1, but it isn't worth checking for.  */
5094
 
5095
  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5096
      && comparison_p
5097
      && GET_MODE_CLASS (mode) == MODE_INT
5098
      && ! side_effects_p (x))
5099
    {
5100
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

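      /* Illustrative sketch (not from the original sources): assuming
         STORE_FLAG_VALUE == 1, with T == (plus F C1) the code below
         rewrites
           (if_then_else COND (plus F C1) F)
         as
           (plus F (mult COND C1))
         since the comparison result is 0 or 1 and so selects 0 or C1.  */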
      if (z)
        {
          temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
                                                 cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (MULT, m, temp,
                                      simplify_gen_binary (MULT, m, c1,
                                                           const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

          if (extend_op != UNKNOWN)
            temp = simplify_gen_unary (extend_op, mode, temp, m);

          return temp;
        }
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */
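  /* For example (illustration only, with A known to be 0 or 1):
       (if_then_else (ne A 0) (const_int 8) (const_int 0))
     becomes (ashift A (const_int 3)), since 8 == 1 << 3.  */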
 
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
           && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
          || ((num_sign_bit_copies (XEXP (cond, 0), mode)
               == GET_MODE_BITSIZE (mode))
              && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
                            gen_lowpart (mode, XEXP (cond, 0)), i);

  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8.  */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && GET_MODE (XEXP (cond, 0)) == mode
      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
          == nonzero_bits (XEXP (cond, 0), mode)
      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
    return XEXP (cond, 0);

  return x;
}
 
/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (rtx x)
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_MODE_CLASS (mode) == MODE_CC
       || GET_CODE (src) == COMPARE
       || CC0_P (dest))
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && COMPARISON_P (*cc_use)
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1, tmp;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
        op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
        op0 = src, op1 = CONST0_RTX (GET_MODE (src));

      tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
                                           op0, op1);
      if (!tmp)
        new_code = old_code;
      else if (!CONSTANT_P (tmp))
        {
          new_code = GET_CODE (tmp);
          op0 = XEXP (tmp, 0);
          op1 = XEXP (tmp, 1);
        }
      else
        {
          rtx pat = PATTERN (other_insn);
          undobuf.other_insn = other_insn;
          SUBST (*cc_use, tmp);

          /* Attempt to simplify CC user.  */
          if (GET_CODE (pat) == SET)
            {
              rtx new = simplify_rtx (SET_SRC (pat));
              if (new != NULL_RTX)
                SUBST (SET_SRC (pat), new);
            }

          /* Convert X into a no-op move.  */
          SUBST (SET_DEST (x), pc_rtx);
          SUBST (SET_SRC (x), pc_rtx);
          return x;
        }

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (new_code, &op0, &op1);

#ifdef SELECT_CC_MODE
      /* If this machine has CC modes other than CCmode, check to see if we
         need to use a different CC mode here.  */
      if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
        compare_mode = GET_MODE (op0);
      else
        compare_mode = SELECT_CC_MODE (new_code, op0, op1);

#ifndef HAVE_cc0
      /* If the mode changed, we have to change SET_DEST, the mode in the
         compare, and the mode in the place SET_DEST is used.  If SET_DEST is
         a hard register, just build new versions with the proper mode.  If it
         is a pseudo, we lose unless it is the only time we set the pseudo, in
         which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
        {
          if (can_change_dest_mode (dest, 0, compare_mode))
            {
              unsigned int regno = REGNO (dest);
              rtx new_dest = gen_rtx_REG (compare_mode, regno);

              if (regno >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[regno], new_dest);

              SUBST (SET_DEST (x), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              other_changed = 1;

              dest = new_dest;
            }
        }
#endif  /* cc0 */
#endif  /* SELECT_CC_MODE */

      /* If the code changed, we have to build a new comparison in
         undobuf.other_insn.  */
      if (new_code != old_code)
        {
          int other_changed_previously = other_changed;
          unsigned HOST_WIDE_INT mask;

          SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
                                          dest, const0_rtx));
          other_changed = 1;

          /* If the only change we made was to change an EQ into an NE or
             vice versa, OP0 has only one bit that might be nonzero, and OP1
             is zero, check if changing the user of the condition code will
             produce a valid insn.  If it won't, we can keep the original code
             in that insn by surrounding our operation with an XOR.  */
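          /* Hypothetical instance: if OP0 can only have bit 2 set
             (MASK == 4), then (eq OP0 (const_int 0)) is equivalent to
             (ne (xor OP0 (const_int 4)) (const_int 0)), so XORing OP0
             with MASK lets the user insn keep its original code.  */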
 
          if (((old_code == NE && new_code == EQ)
               || (old_code == EQ && new_code == NE))
              && ! other_changed_previously && op1 == const0_rtx
              && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
              && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
            {
              rtx pat = PATTERN (other_insn), note = 0;

              if ((recog_for_combine (&pat, other_insn, &note) < 0
                   && ! check_asm_operands (pat)))
                {
                  PUT_CODE (*cc_use, old_code);
                  other_changed = 0;

                  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
                                             op0, GEN_INT (mask));
                }
            }
        }

      if (other_changed)
        undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
         needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = op0;
        }
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
         correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
      else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = SET_SRC (x);
        }
      else
        {
          /* Otherwise, update the COMPARE if needed.  */
          SUBST (XEXP (src, 0), op0);
          SUBST (XEXP (src, 1), op1);
        }
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
         compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */
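  /* Sketch (illustrative modes only): with X a QImode register,
       (set X (subreg:QI (plus:SI A B) 0))
     can become
       (set (subreg:SI X 0) (plus:SI A B))
     because only the low-order bits of the wider PLUS are needed.  */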
 
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && !OBJECT_P (SUBREG_REG (src))
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
           / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
               + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
      && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
            && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
                                         GET_MODE (SUBREG_REG (src)),
                                         GET_MODE (src)))
#endif
      && (REG_P (dest)
          || (GET_CODE (dest) == SUBREG
              && REG_P (SUBREG_REG (dest)))))
    {
      SUBST (SET_DEST (x),
             gen_lowpart (GET_MODE (SUBREG_REG (src)), dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef HAVE_cc0
  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
     in SRC.  */
  if (dest == cc0_rtx
      && GET_CODE (src) == SUBREG
      && subreg_lowpart_p (src)
      && (GET_MODE_BITSIZE (GET_MODE (src))
          < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
    {
      rtx inner = SUBREG_REG (src);
      enum machine_mode inner_mode = GET_MODE (inner);

      /* Here we make sure that we don't have a sign bit on.  */
      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (inner, inner_mode)
              < ((unsigned HOST_WIDE_INT) 1
                 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
        {
          SUBST (SET_SRC (x), inner);
          src = SET_SRC (x);
        }
    }
#endif

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */
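  /* Illustration (assuming LOAD_EXTEND_OP (QImode) == ZERO_EXTEND):
       (set FOO (subreg:SI (mem:QI ADDR) 0))
     is rewritten as
       (set FOO (zero_extend:SI (mem:QI ADDR))).  */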
 
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
      && SUBREG_BYTE (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && MEM_P (SUBREG_REG (src)))
    {
      SUBST (SET_SRC (x),
             gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
                            GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IORed together.  In
     practice, at most two will be nonzero.  Then we do the IORs.  */
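  /* Sketch: when A is known to be 0 or -1,
       (if_then_else (ne A 0) B C)
     is equivalent to
       (ior (and A B) (and (not A) C));
     TERM1 below handles the arm-is-an-IOR-of-the-other-arm cases.  */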
 
  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
                               GET_MODE (XEXP (XEXP (src, 0), 0)))
          == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
                      ? XEXP (src, 1) : XEXP (src, 2));
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
                       ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true_rtx) == IOR
          && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
      else if (GET_CODE (true_rtx) == IOR
               && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;

      term2 = simplify_gen_binary (AND, GET_MODE (src),
                                   XEXP (XEXP (src, 0), 0), true_rtx);
      term3 = simplify_gen_binary (AND, GET_MODE (src),
                                   simplify_gen_unary (NOT, GET_MODE (src),
                                                       XEXP (XEXP (src, 0), 0),
                                                       GET_MODE (src)),
                                   false_rtx);

      SUBST (SET_SRC (x),
             simplify_gen_binary (IOR, GET_MODE (src),
                                  simplify_gen_binary (IOR, GET_MODE (src),
                                                       term1, term2),
                                  term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}

/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  */
 
static rtx
simplify_logical (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);
  rtx reversed;

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~B) since the latter is often a single
         insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
          && rtx_equal_p (XEXP (op0, 0), op1)
          && ! side_effects_p (op1))
        x = simplify_gen_binary (AND, mode,
                                 simplify_gen_unary (NOT, mode,
                                                     XEXP (op0, 1), mode),
                                 op1);

      if (GET_CODE (op0) == XOR
          && rtx_equal_p (XEXP (op0, 1), op1)
          && ! side_effects_p (op1))
        x = simplify_gen_binary (AND, mode,
                                 simplify_gen_unary (NOT, mode,
                                                     XEXP (op0, 0), mode),
                                 op1);

      /* Similarly for (~(A ^ B)) & A.  */
      if (GET_CODE (op0) == NOT
          && GET_CODE (XEXP (op0, 0)) == XOR
          && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
          && ! side_effects_p (op1))
        x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
          && GET_CODE (XEXP (op0, 0)) == XOR
          && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
          && ! side_effects_p (op1))
        x = simplify_gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      /* We can call simplify_and_const_int only if we don't lose
         any (sign) bits when converting INTVAL (op1) to
         "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
          && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              || INTVAL (op1) > 0))
        {
          x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

          /* If we have (ior (and X C1) C2) and the next restart would be
             the last, simplify this by making C1 as small as possible
             and then exit.  Only do this if C1 actually changes: for now
             this only saves memory but, should this transformation be
             moved to simplify-rtx.c, we'd risk unbounded recursion there.  */
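          /* Concrete instance (made-up constants): with C1 == 15 and
             C2 == 3,
               (ior (and X (const_int 15)) (const_int 3))
             becomes
               (ior (and X (const_int 12)) (const_int 3)).  */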
          if (GET_CODE (x) == IOR && GET_CODE (op0) == AND
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (op1) == CONST_INT
              && (INTVAL (XEXP (op0, 1)) & INTVAL (op1)) != 0)
            return simplify_gen_binary (IOR, mode,
                                        simplify_gen_binary
                                          (AND, mode, XEXP (op0, 0),
                                           GEN_INT (INTVAL (XEXP (op0, 1))
                                                    & ~INTVAL (op1))), op1);

          if (GET_CODE (x) != AND)
            return x;

          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
        }

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
          && (rtx_equal_p (XEXP (op0, 0), op1)
              || rtx_equal_p (XEXP (op0, 1), op1))
          && ! side_effects_p (XEXP (op0, 0))
          && ! side_effects_p (XEXP (op0, 1)))
        return op1;

      /* If we have any of (and (ior A B) C) or (and (xor A B) C),
         apply the distributive law and then the inverse distributive
         law to see if things simplify.  */
      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }
      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0)
        return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
          && (rtx_equal_p (XEXP (op0, 0), op1)
              || rtx_equal_p (XEXP (op0, 1), op1))
          && ! side_effects_p (XEXP (op0, 0))
          && ! side_effects_p (XEXP (op0, 1)))
        return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      if (GET_CODE (op1) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
         mode size to (rotate A CX).  */
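      /* E.g., in a 32-bit mode (illustration only):
           (ior (ashift A (const_int 8)) (lshiftrt A (const_int 24)))
         becomes (rotate A (const_int 8)), since 8 + 24 == 32.  */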
 
      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
           || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
          && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && GET_CODE (XEXP (op1, 1)) == CONST_INT
          && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
              == GET_MODE_BITSIZE (mode)))
        return gen_rtx_ROTATE (mode, XEXP (op0, 0),
                               (GET_CODE (op0) == ASHIFT
                                ? XEXP (op0, 1) : XEXP (op1, 1)));

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
         a (sign_extend (plus ...)).  If so, and if OP1 is a CONST_INT and
         the PLUS does not affect any of the bits in OP1, it can really be
         done as a PLUS and we can associate.  We do this by seeing if OP1
         can be safely shifted left C bits.  */
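      /* Sketch (made-up constants): with C == 8 and OP1 == (const_int 1),
         MASK is 1 << 8; if bit 8 of the PLUS is known to be zero,
           (ior (ashiftrt (plus X (const_int N)) (const_int 8)) (const_int 1))
         becomes
           (ashiftrt (plus X (const_int N | 256)) (const_int 8)).  */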
      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
          && GET_CODE (XEXP (op0, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int count = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT mask = INTVAL (op1) << count;

          if (mask >> count == INTVAL (op1)
              && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
            {
              SUBST (XEXP (XEXP (op0, 0), 1),
                     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
              return op0;
            }
        }
      break;

    case XOR:
      /* If we are XORing two things that have no bits in common,
         convert them into an IOR.  This helps to detect rotations encoded
         this way and may enable other simplifications.  */
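      /* E.g. (illustration only): if nonzero_bits says A can only use bits
         0-7 and B only bits 8-31, then (xor A B) == (ior A B).  */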
 
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op0, mode)
              & nonzero_bits (op1, mode)) == 0)
        return simplify_gen_binary (IOR, mode, op0, op1);

      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
         Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
         (NOT y).  */
      {
        int num_negated = 0;

        if (GET_CODE (op0) == NOT)
          num_negated++, op0 = XEXP (op0, 0);
        if (GET_CODE (op1) == NOT)
          num_negated++, op1 = XEXP (op1, 0);

        if (num_negated == 2)
          {
            SUBST (XEXP (x, 0), op0);
            SUBST (XEXP (x, 1), op1);
          }
        else if (num_negated == 1)
          return
            simplify_gen_unary (NOT, mode,
                                simplify_gen_binary (XOR, mode, op0, op1),
                                mode);
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
         correspond to a machine insn or result in further simplifications
         if B is a constant.  */

      if (GET_CODE (op0) == AND
          && rtx_equal_p (XEXP (op0, 1), op1)
          && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode,
                                    simplify_gen_unary (NOT, mode,
                                                        XEXP (op0, 0), mode),
                                    op1);

      else if (GET_CODE (op0) == AND
               && rtx_equal_p (XEXP (op0, 0), op1)
               && ! side_effects_p (op1))
        return simplify_gen_binary (AND, mode,
                                    simplify_gen_unary (NOT, mode,
                                                        XEXP (op0, 1), mode),
                                    op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
         comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
          && op1 == const1_rtx
          && COMPARISON_P (op0)
          && (reversed = reversed_comparison (op0, mode)))
        return reversed;

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
         is (lt foo (const_int 0)), so we can perform the above
         simplification if STORE_FLAG_VALUE is 1.  */

      if (STORE_FLAG_VALUE == 1
          && op1 == const1_rtx
          && GET_CODE (op0) == LSHIFTRT
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
         when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
              == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
          && op1 == const_true_rtx
          && COMPARISON_P (op0)
          && (reversed = reversed_comparison (op0, mode)))
        return reversed;

      break;

    default:
      gcc_unreachable ();
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
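/* For instance (illustrative, assuming 8-bit QImode and 32-bit SImode):
   (sign_extend:SI (subreg:QI X 0)) expands to
     (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24))
   while (zero_extend:SI (subreg:QI X 0)) expands to the equivalent of
     (and:SI X (const_int 255)).  */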
 
static rtx
expand_compound_operation (rtx x)
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
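
      /* ... fall through ...  */
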
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
         it depends on implicitly extending the value.
         Since we don't know the right way to extend it,
         we can't tell whether the implicit way is right.

         Even for a mode that is no wider than a const_int,
         we can't win, because we need to sign extend one of its bits through
         the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
        return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
         (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
         because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
         reloaded.  If not for that, MEMs would very rarely be safe.
 
         Reject MODEs bigger than a word, because we might not be able
         to reference a two-register group starting with an arbitrary register
         (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
         is if it is an ASM_OPERANDS), we can't do anything since we don't
         know how much masking to do.  */
      if (len == 0)
        return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;

      /* ... fall through ...  */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || GET_CODE (XEXP (x, 2)) != CONST_INT
          || GET_MODE (XEXP (x, 0)) == VOIDmode)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* If this goes outside the object being extracted, replace the object
         with a (use (mem ...)) construct that only combine understands
         and is used only for this purpose.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
        SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));

      if (BITS_BIG_ENDIAN)
        pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to the cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                & ~(((unsigned HOST_WIDE_INT)
                      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
                     >> 1))
               == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
        return temp2;
      else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
        return temp;
      else
        return x;
    }
 
  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
         know that the last value didn't have any inappropriate bits
         set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
         is a comparison and STORE_FLAG_VALUE permits.  This is like
         the first case, but it works even when GET_MODE (x) is larger
         than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && COMPARISON_P (XEXP (XEXP (x, 0), 0))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));
    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to
     produce such a position.  */
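  /* Worked instance of the rule above (illustration only): for
     (sign_extract:SI X (const_int 8) (const_int 8)), BITSIZE == 32,
     LEN == 8 and POS == 8, giving
       (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24)).  */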
 
  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth + len >= pos)
    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
                                GET_MODE (x),
                                simplify_shift_const (NULL_RTX, ASHIFT,
                                                      GET_MODE (x),
                                                      XEXP (x, 0),
                                                      modewidth - pos - len),
                                modewidth - len);

  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
                                  simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                        GET_MODE (x),
                                                        XEXP (x, 0), pos),
                                  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}
 
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGs).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */
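/* Illustration (made-up operands): storing SRC into an 8-bit field at
   bit 4 of register R,
     (set (zero_extract:SI R (const_int 8) (const_int 4)) SRC)
   expands, per the MASK/CLEARED/MASKED computation below, into
     (set R (ior (and R (not (ashift (const_int 255) (const_int 4))))
                 (ashift (and SRC (const_int 255)) (const_int 4)))).  */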
 
static rtx
expand_field_assignment (rtx x)
{
  rtx inner;
  rtx pos;                      /* Always counts from low bit.  */
  int len;
  rtx mask, cleared, masked;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
          && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
        {
          inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
          len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
          pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
        }
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
               && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
        {
          inner = XEXP (SET_DEST (x), 0);
          len = INTVAL (XEXP (SET_DEST (x), 1));
          pos = XEXP (SET_DEST (x), 2);

          /* If the position is constant and spans the width of INNER,
             surround INNER with a USE to indicate this.  */
          if (GET_CODE (pos) == CONST_INT
              && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
            inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

          if (BITS_BIG_ENDIAN)
            {
              if (GET_CODE (pos) == CONST_INT)
                pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
                               - INTVAL (pos));
              else if (GET_CODE (pos) == MINUS
                       && GET_CODE (XEXP (pos, 1)) == CONST_INT
                       && (INTVAL (XEXP (pos, 1))
                           == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
              else
                pos = simplify_gen_binary (MINUS, GET_MODE (pos),
                                           GEN_INT (GET_MODE_BITSIZE (
                                                    GET_MODE (inner))
                                                    - len),
                                           pos);
            }
        }

      /* A SUBREG between two modes that occupy the same number of words
         can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
               /* We need SUBREGs to compute nonzero_bits properly.  */
               && nonzero_sign_valid
               && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                        + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
        {
          x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
                           gen_lowpart
                           (GET_MODE (SUBREG_REG (SET_DEST (x))),
                            SET_SRC (x)));
          continue;
        }
      else
        break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
        inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
      if (! SCALAR_INT_MODE_P (compute_mode))
        {
          enum machine_mode imode;

          /* Don't do anything for vector or complex integral types.  */
          if (! FLOAT_MODE_P (compute_mode))
            break;

          /* Try to find an integral mode to pun with.  */
          imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
          if (imode == BLKmode)
            break;

          compute_mode = imode;
          inner = gen_lowpart (imode, inner);
        }

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
        break;

      /* Now compute the equivalent expression.  Make a copy of INNER
         for the SET_DEST in case it is a MEM into which we will substitute;
         we don't want shared RTL in that case.  */
      mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      cleared = simplify_gen_binary (AND, compute_mode,
                                     simplify_gen_unary (NOT, compute_mode,
                                       simplify_gen_binary (ASHIFT,
                                                            compute_mode,
                                                            mask, pos),
                                       compute_mode),
                                     inner);
      masked = simplify_gen_binary (ASHIFT, compute_mode,
                                    simplify_gen_binary (
                                      AND, compute_mode,
                                      gen_lowpart (compute_mode, SET_SRC (x)),
                                      mask),
                                    pos);

      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
                       simplify_gen_binary (IOR, compute_mode,
                                            cleared, masked));
    }

  return x;
}

/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bit-field
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is nonzero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is nonzero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
   a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */
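/* Conceptually (a sketch, not the exact code path): for a non-MEM
   source and constant POS, the unsigned case extracts something like
     (X >> POS) & ((1 << LEN) - 1)
   while the signed case additionally sign-extends from the top bit of
   the field.  */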
 
static rtx
make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
                 int in_dest, int in_compare)
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
         consider just the QI as the memory to extract from.
         The subreg adds or removes high bits; its mode is
         irrelevant to the meaning of this extraction,
         since POS and LEN count from the lsb.  */
      if (MEM_P (SUBREG_REG (inner)))
        is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }
  else if (GET_CODE (inner) == ASHIFT
           && GET_CODE (XEXP (inner, 1)) == CONST_INT
           && pos_rtx == 0 && pos == 0
           && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
    {
      /* We're extracting the least significant bits of an rtx
         (ashift X (const_int C)), where LEN > C.  Extract the
         least significant (LEN - C) bits of X, giving an rtx
         whose mode is MODE, then shift it left C times.  */
      new = make_extraction (mode, XEXP (inner, 0),
                             0, 0, len - INTVAL (XEXP (inner, 1)),
                             unsignedp, in_dest, in_compare);
      if (new != 0)
        return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */
 
  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
           && !MEM_P (inner)
           && (! in_dest
               || (REG_P (inner)
                   && have_insn_for (STRICT_LOW_PART, tmode))))
          || (MEM_P (inner) && pos_rtx == 0
              && (pos
                  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
                     : BITS_PER_UNIT)) == 0
              /* We can't do this if we are widening INNER_MODE (it
                 may not be aligned, for one thing).  */
              && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
              && (inner_mode == tmode
                  || (! mode_dependent_address_p (XEXP (inner, 0))
                      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
         field.  If the original and current mode are the same, we need not
         adjust the offset.  Otherwise, we do if bytes big endian.

         If INNER is not a MEM, get a piece consisting of just the field
         of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (MEM_P (inner))
        {
          HOST_WIDE_INT offset;

          /* POS counts from lsb, but make OFFSET count in memory order.  */
          if (BYTES_BIG_ENDIAN)
            offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
          else
            offset = pos / BITS_PER_UNIT;

          new = adjust_address_nv (inner, tmode, offset);
        }
      else if (REG_P (inner))
        {
          if (tmode != inner_mode)
            {
              /* We can't call gen_lowpart in a DEST since we
                 always want a SUBREG (see below) and it would sometimes
                 return a new hard register.  */
              if (pos || in_dest)
                {
                  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

                  if (WORDS_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
                    final_word = ((GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode))
                                  / UNITS_PER_WORD) - final_word;

                  final_word *= UNITS_PER_WORD;
                  if (BYTES_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
                    final_word += (GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

                  /* Avoid creating invalid subregs, for example when
                     simplifying (x>>32)&255.  */
                  if (!validate_subreg (tmode, inner_mode, inner, final_word))
                    return NULL_RTX;

                  new = gen_rtx_SUBREG (tmode, inner, final_word);
                }
              else
                new = gen_lowpart (tmode, inner);
            }
          else
            new = inner;
        }
      else
        new = force_to_mode (inner, tmode,
                             len >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                             NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
         make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
        return (MEM_P (new) ? new
                : (GET_CODE (new) != SUBREG
                   ? gen_rtx_CLOBBER (tmode, const0_rtx)
                   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));

      if (mode == tmode)
        return new;

      if (GET_CODE (new) == CONST_INT)
        return gen_int_mode (INTVAL (new), mode);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         sign and zero extension, that are equivalent in these cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (new, tmode)
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (tmode))
                       >> 1))
                  == 0)))
        {
          rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
          rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);

          /* Prefer ZERO_EXTEND, since it gives more information to
             backends.  */
          if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
            return temp;
          return temp1;
        }

      /* Otherwise, sign- or zero-extend unless we already are in the
         proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                             mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
     we would be spanning bytes or if the position is not a constant and the
     length is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte && MEM_P (inner)
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
          || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (!MEM_P (inner))
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
           && (mode_dependent_address_p (XEXP (inner, 0))
               || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
         BITS_BIG_ENDIAN style.  If position is constant, compute new
         position.  Otherwise, build subtraction.
         Note that POS is relative to the mode of the original argument.
         If it's a MEM we need to recompute POS relative to that.
         However, if we're extracting from (or inserting into) a register,
         we want to recompute POS relative to wanted_inner_mode.  */
      int width = (MEM_P (inner)
                   ? GET_MODE_BITSIZE (is_mode)
                   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
        pos = width - len - pos;
      else
        pos_rtx
          = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
         Note that it can only be less than 0 if !MEM_P (inner).  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((MEM_P (inner)
           && (inner_mode == wanted_inner_mode
               || (! mode_dependent_address_p (XEXP (inner, 0))
                   && ! MEM_VOLATILE_P (inner))))))
6511
    {
6512
      int offset = 0;
6513
 
6514
      /* The computations below will be correct if the machine is big
6515
         endian in both bits and bytes or little endian in bits and bytes.
6516
         If it is mixed, we must adjust.  */
6517
 
6518
      /* If bytes are big endian and we had a paradoxical SUBREG, we must
6519
         adjust OFFSET to compensate.  */
6520
      if (BYTES_BIG_ENDIAN
6521
          && ! spans_byte
6522
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6523
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
6524
 
6525
      /* If this is a constant position, we can move to the desired byte.
6526
         Be careful not to go beyond the original object and maintain the
6527
         natural alignment of the memory.  */
6528
      if (pos_rtx == 0)
6529
        {
6530
          enum machine_mode bfmode = smallest_mode_for_size (len, MODE_INT);
6531
          offset += (pos / GET_MODE_BITSIZE (bfmode)) * GET_MODE_SIZE (bfmode);
6532
          pos %= GET_MODE_BITSIZE (bfmode);
6533
        }
6534
 
6535
      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
6536
          && ! spans_byte
6537
          && is_mode != wanted_inner_mode)
6538
        offset = (GET_MODE_SIZE (is_mode)
6539
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);
6540
 
6541
      if (offset != 0 || inner_mode != wanted_inner_mode)
6542
        inner = adjust_address_nv (inner, wanted_inner_mode, offset);
6543
    }
6544
 
6545
  /* If INNER is not memory, we can always get it into the proper mode.  If we
6546
     are changing its mode, POS must be a constant and smaller than the size
6547
     of the new mode.  */
6548
  else if (!MEM_P (inner))
6549
    {
6550
      if (GET_MODE (inner) != wanted_inner_mode
6551
          && (pos_rtx != 0
6552
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
6553
        return 0;
6554
 
6555
      if (orig_pos < 0)
6556
        return 0;
6557
 
6558
      inner = force_to_mode (inner, wanted_inner_mode,
6559
                             pos_rtx
6560
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
6561
                             ? ~(unsigned HOST_WIDE_INT) 0
6562
                             : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6563
                                << orig_pos),
6564
                             NULL_RTX, 0);
6565
    }
6566
 
6567
  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
6568
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
6569
  if (pos_rtx != 0
6570
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
6571
    {
6572
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
6573
 
6574
      /* If we know that no extraneous bits are set, and that the high
6575
         bit is not set, convert extraction to cheaper one - either
6576
         SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6577
         cases.  */
6578
      if (flag_expensive_optimizations
6579
          && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
6580
              && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
6581
                   & ~(((unsigned HOST_WIDE_INT)
6582
                        GET_MODE_MASK (GET_MODE (pos_rtx)))
6583
                       >> 1))
6584
                  == 0)))
6585
        {
6586
          rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
6587
 
6588
          /* Prefer ZERO_EXTENSION, since it gives more information to
6589
             backends.  */
6590
          if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
6591
            temp = temp1;
6592
        }
6593
      pos_rtx = temp;
6594
    }
6595
  else if (pos_rtx != 0
6596
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6597
    pos_rtx = gen_lowpart (pos_mode, pos_rtx);
6598
 
6599
  /* Make POS_RTX unless we already have it and it is correct.  If we don't
6600
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
6601
     be a CONST_INT.  */
6602
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
6603
    pos_rtx = orig_pos_rtx;
6604
 
6605
  else if (pos_rtx == 0)
6606
    pos_rtx = GEN_INT (pos);
6607
 
6608
  /* Make the required operation.  See if we can use existing rtx.  */
6609
  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
6610
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
6611
  if (! in_dest)
6612
    new = gen_lowpart (mode, new);
6613
 
6614
  return new;
6615
}
6616
 
6617
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
6618
   with any other operations in X.  Return X without that shift if so.  */
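/* (A worked example, under the assumption that X is
   (plus (ashift Y (const_int 3)) (const_int 8)) and COUNT is 3:
   since 8 has its low three bits clear, the shift can be commuted
   outward and we return (plus Y (const_int 1)).)  */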

static rtx
extract_left_shift (rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_binary (code, mode, tem,
                                    GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
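/* (Loosely, an expression such as
   (and:SI (lshiftrt:SI X (const_int 4)) (const_int 255))
   is rewritten here as
   (zero_extract:SI X (const_int 8) (const_int 4)),
   an extraction of the 8 bits starting at bit 4.)  */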

static rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
               : ((code == COMPARE || COMPARISON_P (x))
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
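      /* (For instance, inside an address
         (ashift:SI IDX (const_int 2)) becomes
         (mult:SI IDX (const_int 4)), the canonical form for a scaled
         index.)  */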
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          new = make_compound_operation (XEXP (x, 0), next_code);
          new = gen_rtx_MULT (mode, new,
                              GEN_INT ((HOST_WIDE_INT) 1
                                       << INTVAL (XEXP (x, 1))));
        }
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                             XEXP (x, 1)),
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                             XEXP (x, 1)));
          new = make_compound_operation (new, in_code);
        }

      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new,
                                 (GET_MODE_BITSIZE (mode)
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
        }

      /* On machines without logical shifts, if the operand of the AND is
         a logical shift and our mask turns off all the propagated sign
         bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
               && !have_insn_for (LSHIFTRT, mode)
               && have_insn_for (ASHIFTRT, mode)
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
               && mode_width <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
            SUBST (XEXP (x, 0),
                   gen_rtx_ASHIFTRT (mode,
                                     make_compound_operation
                                     (XEXP (XEXP (x, 0), 0), next_code),
                                     XEXP (XEXP (x, 0), 1)));
        }

      /* If the constant is one less than a power of two, this might be
         representable by an extraction even if no shift is present.
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
         we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        new = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
         convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
               && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
        new = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
          && ! have_insn_for (LSHIFTRT, mode)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
        {
          new = gen_rtx_ASHIFTRT (mode,
                                  make_compound_operation (XEXP (x, 0),
                                                           next_code),
                                  XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
          && GET_CODE (lhs) == ASHIFT
          && GET_CODE (XEXP (lhs, 1)) == CONST_INT
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
        {
          new = make_compound_operation (XEXP (lhs, 0), next_code);
          new = make_extraction (mode, new,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
          break;
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
          && ! (GET_CODE (lhs) == SUBREG
                && (OBJECT_P (SUBREG_REG (lhs))))
          && GET_CODE (rhs) == CONST_INT
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new = make_extraction (mode, make_compound_operation (new, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);

      {
        rtx simplified;
        simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
                                      SUBREG_BYTE (x));

        if (simplified)
          tem = simplified;

        if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
            && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
            && subreg_lowpart_p (x))
          {
            rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
                                       NULL_RTX, 0);

            /* If we have something other than a SUBREG, we might have
               done an expansion, so rerun ourselves.  */
            if (GET_CODE (newer) != SUBREG)
              newer = make_compound_operation (newer, in_code);

            return newer;
          }

        if (simplified)
          return tem;
      }
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new);
      }

  return x;
}

/* Given M, see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
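/* (For example, M == 0x0ff0 selects an 8-bit field starting at bit 4,
   so we return 4 and set *PLEN to 8; the set bits of M == 0x0ff1 are
   not contiguous, so for that value we return -1.)  */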

static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is nonzero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
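/* (Two small examples: forcing (const_int 0x1234) to QImode under a
   MASK of 0xff yields (const_int 0x34); and forcing
   (and X (const_int 15)) under a MASK of 7 first narrows the AND
   constant to 7 and then deletes the AND entirely, since an AND with
   exactly the wanted bits is redundant.)  */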

static rtx
force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               rtx reg, int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && have_insn_for (code, mode))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
  else
    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
                   - 1);
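  /* (E.g. a MASK of 0x28 gives a FULLER_MASK of 0x3f: carries
     propagating up from bits 0-4 can change bits 3 and 5, so every
     bit below the highest bit of MASK matters to an addition.)  */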

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    x = const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      if (SCALAR_INT_MODE_P (mode))
        return gen_int_mode (INTVAL (x) & mask, mode);
      else
        {
          x = GEN_INT (INTVAL (x) & mask);
          return gen_lowpart_common (mode, x);
        }
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart (mode, x);

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
         spanned the boundary of the MEM.  If we are now masking so it is
         within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
          && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
                       || rtx_equal_p (reg, get_last_value (x))))
        x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
          /* We can ignore the effect of this SUBREG if it narrows the mode or
             if the constant masks to zero all the bits the mode doesn't
             have.  */
          && ((GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
              || (0 == (mask
                        & GET_MODE_MASK (GET_MODE (x))
                        & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
        return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
                  == mask))
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = simplify_gen_binary (AND, GET_MODE (x),
                                       XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET) < rtx_cost (x, SET))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */
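      /* (A sketch of the situation: with M == 0xfffffff0 and FOO known
         to have its low four bits clear, (plus FOO (const_int 0x13))
         can be narrowed to (plus FOO (const_int 0x10)), because bits
         0-3 of the sum can never carry into the bits M selects.)  */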

      {
        unsigned int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && exact_log2 (- smask) >= 0
            && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
            && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
          return force_to_mode (plus_constant (XEXP (x, 0),
                                               (INTVAL (XEXP (x, 1)) & smask)),
                                mode, smask, reg, next_select);
      }

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
         in the mask, then we may replace with (neg Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
                                        & -INTVAL (XEXP (x, 0))))
              > mask))
        {
          x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
                                  GET_MODE (x));
          return force_to_mode (x, mode, mask, reg, next_select);
        }

      /* Similarly, if C contains every bit in the fuller_mask, then we may
         replace with (not Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
              == INTVAL (XEXP (x, 0))))
        {
          x = simplify_gen_unary (NOT, GET_MODE (x),
                                  XEXP (x, 1), GET_MODE (x));
          return force_to_mode (x, mode, mask, reg, next_select);
        }

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
                                   XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, reg, next_select);
        }

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart (op_mode,
                         force_to_mode (XEXP (x, 0), mode, mask,
                                        reg, next_select));
      op1 = gen_lowpart (op_mode,
                         force_to_mode (XEXP (x, 1), mode, mask,
                                        reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart (op_mode,
                         force_to_mode (XEXP (x, 0), op_mode,
                                        mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);
          unsigned HOST_WIDE_INT inner_mask;

          /* Select the mask of the bits we need for the shift operand.  */
          inner_mask = mask << INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and INNER_MASK is no wider than the
             width of X's mode.  */
          if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */
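      /* (Concretely, in a hypothetical SImode example: if FOO is known
         to have at least 28 sign-bit copies, (and (lshiftrt FOO 5) 3)
         picks out two of those copies, so it is equivalent to
         (lshiftrt FOO 30), which needs no AND at all.)  */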

      if (GET_CODE (x) == LSHIFTRT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          /* The shift puts one of the sign bit copies in the least significant
             bit.  */
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_BITSIZE (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          /* Number of bits left after the shift must be more than the mask
             needs.  */
          && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)))
          /* Must be more sign bit copies than the mask needs.  */
          && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                                 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
                                          - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((unsigned HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */
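      /* (For instance, in SImode (ashiftrt X (const_int 24)) under a
         MASK of 0xff becomes (lshiftrt X (const_int 24)): the bits the
         mask keeps are identical either way, and the logical form often
         simplifies further.)  */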

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i = -1;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~(HOST_WIDE_INT) 0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~nonzero) == 0
              || (i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                 i < 0 ? INTVAL (XEXP (x, 1))
                 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, reg, next_select);
            }
        }

      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                 XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && GET_CODE (temp) == CONST_INT)
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), reg, next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
                               GET_MODE (x));
          temp = simplify_gen_binary (XOR, GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                   temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, reg, next_select);
        }

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart (op_mode,
                         force_to_mode (XEXP (x, 0), mode, mask,
                                        reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && GET_MODE (XEXP (x, 0)) == mode
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && (nonzero_bits (XEXP (x, 0), mode)
              == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart (GET_MODE (x),
                          force_to_mode (XEXP (x, 1), mode,
                                         mask, reg, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart (GET_MODE (x),
                          force_to_mode (XEXP (x, 2), mode,
                                         mask, reg, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart (mode, x);
}

/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */
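/* (For example, for X == (ne:SI R (const_int 0)) we return R with
   *PTRUE == const_true_rtx and *PFALSE == const0_rtx; for a plain
   REG about which nothing is known we return zero and set both
   *PTRUE and *PFALSE to the REG itself.)  */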

static rtx
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && XEXP (x, 1) == const0_rtx)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (UNARY_P (x)
           && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
                                    GET_MODE (XEXP (x, 0)));
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (BINARY_P (x))
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          /* If if_then_else_cond returned zero, then true/false are the
             same rtl.  We must copy one of them to prevent invalid rtl
             sharing.  */
          if (cond0 == 0)
            true0 = copy_rtx (true0);
          else if (cond1 == 0)
            true1 = copy_rtx (true1);

          if (COMPARISON_P (x))
            {
              *ptrue = simplify_gen_relational (code, mode, VOIDmode,
                                                true0, true1);
              *pfalse = simplify_gen_relational (code, mode, VOIDmode,
                                                 false0, false1);
            }
          else
            {
              *ptrue = simplify_gen_binary (code, mode, true0, true1);
              *pfalse = simplify_gen_binary (code, mode, false0, false1);
            }

          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is nonzero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = simplify_gen_binary (MULT, mode,
                                             (code == MINUS
                                              ? simplify_gen_unary (NEG, mode,
                                                                    op1, mode)
                                              : op1),
                                              const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a SUBREG, we can narrow both the true and false values
     of the inner expression, if there is a condition.  */
7670
  else if (code == SUBREG
7671
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
7672
                                               &true0, &false0)))
7673
    {
7674
      true0 = simplify_gen_subreg (mode, true0,
7675
                                   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7676
      false0 = simplify_gen_subreg (mode, false0,
7677
                                    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
7678
      if (true0 && false0)
7679
        {
7680
          *ptrue = true0;
7681
          *pfalse = false0;
7682
          return cond0;
7683
        }
7684
    }
7685
 
7686
  /* If X is a constant, this isn't special and will cause confusions
7687
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
7688
  else if (CONSTANT_P (x)
7689
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
7690
    ;
7691
 
7692
  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
7693
     will be least confusing to the rest of the compiler.  */
7694
  else if (mode == BImode)
7695
    {
7696
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
7697
      return x;
7698
    }
7699
 
7700
  /* If X is known to be either 0 or -1, those are the true and
7701
     false values when testing X.  */
7702
  else if (x == constm1_rtx || x == const0_rtx
7703
           || (mode != VOIDmode
7704
               && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
7705
    {
7706
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
7707
      return x;
7708
    }
7709
 
7710
  /* Likewise for 0 or a single bit.  */
7711
  else if (SCALAR_INT_MODE_P (mode)
7712
           && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7713
           && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
7714
    {
7715
      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
7716
      return x;
7717
    }
7718
 
7719
  /* Otherwise fail; show no condition with true and false values the same.  */
7720
  *ptrue = *pfalse = x;
7721
  return 0;
7722
}
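/* Illustrative examples (not part of the original source) of the tail
   cases above, assuming 32-bit SImode and a pseudo R:

     - R known to be 0 or -1 (num_sign_bit_copies == 32):
         *ptrue = (const_int -1), *pfalse = (const_int 0), return R.
     - R known to be 0 or 8 (nonzero_bits == 8, a single bit):
         *ptrue = (const_int 8), *pfalse = (const_int 0), return R.
     - otherwise *ptrue = *pfalse = R and 0 is returned, meaning no
       condition could be extracted.  */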
7723
 
7724
/* Return the value of expression X given the fact that condition COND
7725
   is known to be true when applied to REG as its first operand and VAL
7726
   as its second.  X is known to not be shared and so can be modified in
7727
   place.
7728
 
7729
   We only handle the simplest cases, and specifically those cases that
7730
   arise with IF_THEN_ELSE expressions.  */
7731
 
7732
static rtx
7733
known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7734
{
7735
  enum rtx_code code = GET_CODE (x);
7736
  rtx temp;
7737
  const char *fmt;
7738
  int i, j;
7739
 
7740
  if (side_effects_p (x))
7741
    return x;
7742
 
7743
  /* If either operand of the condition is a floating point value,
7744
     then we have to avoid collapsing an EQ comparison.  */
7745
  if (cond == EQ
7746
      && rtx_equal_p (x, reg)
7747
      && ! FLOAT_MODE_P (GET_MODE (x))
7748
      && ! FLOAT_MODE_P (GET_MODE (val)))
7749
    return val;
7750
 
7751
  if (cond == UNEQ && rtx_equal_p (x, reg))
7752
    return val;
7753
 
7754
  /* If X is (abs REG) and we know something about REG's relationship
7755
     with zero, we may be able to simplify this.  */
7756
 
7757
  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7758
    switch (cond)
7759
      {
7760
      case GE:  case GT:  case EQ:
7761
        return XEXP (x, 0);
7762
      case LT:  case LE:
7763
        return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7764
                                   XEXP (x, 0),
7765
                                   GET_MODE (XEXP (x, 0)));
7766
      default:
7767
        break;
7768
      }
7769
 
7770
  /* The only other cases we handle are MIN, MAX, and comparisons if the
7771
     operands are the same as REG and VAL.  */
7772
 
7773
  else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
7774
    {
7775
      if (rtx_equal_p (XEXP (x, 0), val))
7776
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7777
 
7778
      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7779
        {
7780
          if (COMPARISON_P (x))
7781
            {
7782
              if (comparison_dominates_p (cond, code))
7783
                return const_true_rtx;
7784
 
7785
              code = reversed_comparison_code (x, NULL);
7786
              if (code != UNKNOWN
7787
                  && comparison_dominates_p (cond, code))
7788
                return const0_rtx;
7789
              else
7790
                return x;
7791
            }
7792
          else if (code == SMAX || code == SMIN
7793
                   || code == UMIN || code == UMAX)
7794
            {
7795
              int unsignedp = (code == UMIN || code == UMAX);
7796
 
7797
              /* Do not reverse the condition when it is NE or EQ.
7798
                 This is because we cannot conclude anything about
7799
                 the value of 'SMAX (x, y)' when x is not equal to y,
7800
                 but we can when x equals y.  */
7801
              if ((code == SMAX || code == UMAX)
7802
                  && ! (cond == EQ || cond == NE))
7803
                cond = reverse_condition (cond);
7804
 
7805
              switch (cond)
7806
                {
7807
                case GE:   case GT:
7808
                  return unsignedp ? x : XEXP (x, 1);
7809
                case LE:   case LT:
7810
                  return unsignedp ? x : XEXP (x, 0);
7811
                case GEU:  case GTU:
7812
                  return unsignedp ? XEXP (x, 1) : x;
7813
                case LEU:  case LTU:
7814
                  return unsignedp ? XEXP (x, 0) : x;
7815
                default:
7816
                  break;
7817
                }
7818
            }
7819
        }
7820
    }
7821
  else if (code == SUBREG)
7822
    {
7823
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7824
      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7825
 
7826
      if (SUBREG_REG (x) != r)
7827
        {
7828
          /* We must simplify subreg here, before we lose track of the
7829
             original inner_mode.  */
7830
          new = simplify_subreg (GET_MODE (x), r,
7831
                                 inner_mode, SUBREG_BYTE (x));
7832
          if (new)
7833
            return new;
7834
          else
7835
            SUBST (SUBREG_REG (x), r);
7836
        }
7837
 
7838
      return x;
7839
    }
7840
  /* We don't have to handle SIGN_EXTEND here, because even in the
7841
     case of replacing something with a modeless CONST_INT, a
7842
     CONST_INT is already (supposed to be) a valid sign extension for
7843
     its narrower mode, which implies it's already properly
7844
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7845
     story is different.  */
7846
  else if (code == ZERO_EXTEND)
7847
    {
7848
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7849
      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7850
 
7851
      if (XEXP (x, 0) != r)
7852
        {
7853
          /* We must simplify the zero_extend here, before we lose
7854
             track of the original inner_mode.  */
7855
          new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7856
                                          r, inner_mode);
7857
          if (new)
7858
            return new;
7859
          else
7860
            SUBST (XEXP (x, 0), r);
7861
        }
7862
 
7863
      return x;
7864
    }
7865
 
7866
  fmt = GET_RTX_FORMAT (code);
7867
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7868
    {
7869
      if (fmt[i] == 'e')
7870
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7871
      else if (fmt[i] == 'E')
7872
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7873
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7874
                                                cond, reg, val));
7875
    }
7876
 
7877
  return x;
7878
}
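/* Illustrative walk-through (not part of the original source) of the
   MIN/MAX handling above.  Assume COND is LT, REG is R, VAL is V,
   i.e. (lt R V) is known true:

     known_cond ((smin:SI R V), LT, R, V)  ==>  R
     known_cond ((smax:SI R V), LT, R, V)  ==>  V

   For SMAX the condition is first reversed to GE, and GE selects
   XEXP (x, 1); for SMIN, LT directly selects XEXP (x, 0).  */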
7879
 
7880
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7881
   assignment as a field assignment.  */
7882
 
7883
static int
7884
rtx_equal_for_field_assignment_p (rtx x, rtx y)
7885
{
7886
  if (x == y || rtx_equal_p (x, y))
7887
    return 1;
7888
 
7889
  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7890
    return 0;
7891
 
7892
  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7893
     Note that all SUBREGs of MEM are paradoxical; otherwise they
7894
     would have been rewritten.  */
7895
  if (MEM_P (x) && GET_CODE (y) == SUBREG
7896
      && MEM_P (SUBREG_REG (y))
7897
      && rtx_equal_p (SUBREG_REG (y),
7898
                      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
7899
    return 1;
7900
 
7901
  if (MEM_P (y) && GET_CODE (x) == SUBREG
7902
      && MEM_P (SUBREG_REG (x))
7903
      && rtx_equal_p (SUBREG_REG (x),
7904
                      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
7905
    return 1;
7906
 
7907
  /* We used to see if get_last_value of X and Y were the same but that's
7908
     not correct.  In one direction, we'll cause the assignment to have
7909
     the wrong destination and in the other case, we'll import a register into
7910
     this insn that might already have been dead.  So fail if none of the
7911
     above cases are true.  */
7912
  return 0;
7913
}
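/* Hypothetical instance (not in the original file) of the paradoxical
   SUBREG check above: X = (mem:SI A) and Y = (subreg:SI (mem:HI A') 0)
   compare equal for field-assignment purposes when (mem:HI A') is
   rtx_equal_p to gen_lowpart (HImode, X), i.e. when Y is a paradoxical
   widening of the same memory location.  */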
7914
 
7915
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7916
   Return that assignment if so.
7917
 
7918
   We only handle the most common cases.  */
7919
 
7920
static rtx
7921
make_field_assignment (rtx x)
7922
{
7923
  rtx dest = SET_DEST (x);
7924
  rtx src = SET_SRC (x);
7925
  rtx assign;
7926
  rtx rhs, lhs;
7927
  HOST_WIDE_INT c1;
7928
  HOST_WIDE_INT pos;
7929
  unsigned HOST_WIDE_INT len;
7930
  rtx other;
7931
  enum machine_mode mode;
7932
 
7933
  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7934
     a clear of a one-bit field.  We will have changed it to
7935
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
7936
     for a SUBREG.  */
7937
 
7938
  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7939
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7940
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7941
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7942
    {
7943
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7944
                                1, 1, 1, 0);
7945
      if (assign != 0)
7946
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7947
      return x;
7948
    }
7949
 
7950
  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7951
      && subreg_lowpart_p (XEXP (src, 0))
7952
      && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7953
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7954
      && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7955
      && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
7956
      && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7957
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7958
    {
7959
      assign = make_extraction (VOIDmode, dest, 0,
7960
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7961
                                1, 1, 1, 0);
7962
      if (assign != 0)
7963
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7964
      return x;
7965
    }
7966
 
7967
  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7968
     one-bit field.  */
7969
  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7970
      && XEXP (XEXP (src, 0), 0) == const1_rtx
7971
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7972
    {
7973
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7974
                                1, 1, 1, 0);
7975
      if (assign != 0)
7976
        return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7977
      return x;
7978
    }
7979
 
7980
  /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
7981
     SRC is an AND with all bits of that field set, then we can discard
7982
     the AND.  */
7983
  if (GET_CODE (dest) == ZERO_EXTRACT
7984
      && GET_CODE (XEXP (dest, 1)) == CONST_INT
7985
      && GET_CODE (src) == AND
7986
      && GET_CODE (XEXP (src, 1)) == CONST_INT)
7987
    {
7988
      HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
7989
      unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
7990
      unsigned HOST_WIDE_INT ze_mask;
7991
 
7992
      if (width >= HOST_BITS_PER_WIDE_INT)
7993
        ze_mask = -1;
7994
      else
7995
        ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
7996
 
7997
      /* Complete overlap.  We can remove the source AND.  */
7998
      if ((and_mask & ze_mask) == ze_mask)
7999
        return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8000
 
8001
      /* Partial overlap.  We can reduce the source AND.  */
8002
      if ((and_mask & ze_mask) != and_mask)
8003
        {
8004
          mode = GET_MODE (src);
8005
          src = gen_rtx_AND (mode, XEXP (src, 0),
8006
                             gen_int_mode (and_mask & ze_mask, mode));
8007
          return gen_rtx_SET (VOIDmode, dest, src);
8008
        }
8009
    }
8010
 
8011
  /* The other case we handle is assignments into a constant-position
8012
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
8013
     a mask that has all one bits except for a group of zero bits and
8014
     OTHER is known to have zeros where C1 has ones, this is such an
8015
     assignment.  Compute the position and length from C1.  Shift OTHER
8016
     to the appropriate position, force it to the required mode, and
8017
     make the extraction.  Check for the AND in both operands.  */
8018
 
8019
  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
8020
    return x;
8021
 
8022
  rhs = expand_compound_operation (XEXP (src, 0));
8023
  lhs = expand_compound_operation (XEXP (src, 1));
8024
 
8025
  if (GET_CODE (rhs) == AND
8026
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
8027
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8028
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8029
  else if (GET_CODE (lhs) == AND
8030
           && GET_CODE (XEXP (lhs, 1)) == CONST_INT
8031
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8032
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
8033
  else
8034
    return x;
8035
 
8036
  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
8037
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
8038
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8039
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
8040
    return x;
8041
 
8042
  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
8043
  if (assign == 0)
8044
    return x;
8045
 
8046
  /* The mode to use for the source is the mode of the assignment, or of
8047
     what is inside a possible STRICT_LOW_PART.  */
8048
  mode = (GET_CODE (assign) == STRICT_LOW_PART
8049
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
8050
 
8051
  /* Shift OTHER right POS places and make it the source, restricting it
8052
     to the proper length and mode.  */
8053
 
8054
  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
8055
                                             GET_MODE (src), other, pos),
8056
                       mode,
8057
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
8058
                       ? ~(unsigned HOST_WIDE_INT) 0
8059
                       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
8060
                       dest, 0);
8061
 
8062
  /* If SRC is masked by an AND that does not make a difference in
8063
     the value being stored, strip it.  */
8064
  if (GET_CODE (assign) == ZERO_EXTRACT
8065
      && GET_CODE (XEXP (assign, 1)) == CONST_INT
8066
      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8067
      && GET_CODE (src) == AND
8068
      && GET_CODE (XEXP (src, 1)) == CONST_INT
8069
      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
8070
          == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8071
    src = XEXP (src, 0);
8072
 
8073
  return gen_rtx_SET (VOIDmode, assign, src);
8074
}
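/* Illustrative transcript (not part of the original file) of the
   one-bit cases above, with D a pseudo and P a variable bit position:

     (set D (ior (ashift (const_int 1) P) D))
       ==> (set (zero_extract D (const_int 1) P) (const_int 1))

     (set D (and (rotate (const_int -2) P) D))
       ==> (set (zero_extract D (const_int 1) P) (const_int 0))

   assuming make_extraction succeeds in building the ZERO_EXTRACT.  */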
8075
 
8076
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
8077
   if so.  */
8078
 
8079
static rtx
8080
apply_distributive_law (rtx x)
8081
{
8082
  enum rtx_code code = GET_CODE (x);
8083
  enum rtx_code inner_code;
8084
  rtx lhs, rhs, other;
8085
  rtx tem;
8086
 
8087
  /* Distributivity is not true for floating point as it can change the
8088
     value.  So we don't do it unless -funsafe-math-optimizations.  */
8089
  if (FLOAT_MODE_P (GET_MODE (x))
8090
      && ! flag_unsafe_math_optimizations)
8091
    return x;
8092
 
8093
  /* The outer operation can only be one of the following:  */
8094
  if (code != IOR && code != AND && code != XOR
8095
      && code != PLUS && code != MINUS)
8096
    return x;
8097
 
8098
  lhs = XEXP (x, 0);
8099
  rhs = XEXP (x, 1);
8100
 
8101
  /* If either operand is a primitive we can't do anything, so get out
8102
     fast.  */
8103
  if (OBJECT_P (lhs) || OBJECT_P (rhs))
8104
    return x;
8105
 
8106
  lhs = expand_compound_operation (lhs);
8107
  rhs = expand_compound_operation (rhs);
8108
  inner_code = GET_CODE (lhs);
8109
  if (inner_code != GET_CODE (rhs))
8110
    return x;
8111
 
8112
  /* See if the inner and outer operations distribute.  */
8113
  switch (inner_code)
8114
    {
8115
    case LSHIFTRT:
8116
    case ASHIFTRT:
8117
    case AND:
8118
    case IOR:
8119
      /* These all distribute except over PLUS.  */
8120
      if (code == PLUS || code == MINUS)
8121
        return x;
8122
      break;
8123
 
8124
    case MULT:
8125
      if (code != PLUS && code != MINUS)
8126
        return x;
8127
      break;
8128
 
8129
    case ASHIFT:
8130
      /* This is also a multiply, so it distributes over everything.  */
8131
      break;
8132
 
8133
    case SUBREG:
8134
      /* Non-paradoxical SUBREGs distribute over all operations,
8135
         provided the inner modes and byte offsets are the same, this
8136
         is an extraction of a low-order part, we don't convert an fp
8137
         operation to int or vice versa, this is not a vector mode,
8138
         and we would not be converting a single-word operation into a
8139
         multi-word operation.  The latter test is not required, but
8140
         it prevents generating unneeded multi-word operations.  Some
8141
         of the previous tests are redundant given the latter test,
8142
         but are retained because they are required for correctness.
8143
 
8144
         We produce the result slightly differently in this case.  */
8145
 
8146
      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
8147
          || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
8148
          || ! subreg_lowpart_p (lhs)
8149
          || (GET_MODE_CLASS (GET_MODE (lhs))
8150
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
8151
          || (GET_MODE_SIZE (GET_MODE (lhs))
8152
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8153
          || VECTOR_MODE_P (GET_MODE (lhs))
8154
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
8155
        return x;
8156
 
8157
      tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8158
                                 SUBREG_REG (lhs), SUBREG_REG (rhs));
8159
      return gen_lowpart (GET_MODE (x), tem);
8160
 
8161
    default:
8162
      return x;
8163
    }
8164
 
8165
  /* Set LHS and RHS to the inner operands (A and B in the example
8166
     above) and set OTHER to the common operand (C in the example).
8167
     There is only one way to do this unless the inner operation is
8168
     commutative.  */
8169
  if (COMMUTATIVE_ARITH_P (lhs)
8170
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8171
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8172
  else if (COMMUTATIVE_ARITH_P (lhs)
8173
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8174
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8175
  else if (COMMUTATIVE_ARITH_P (lhs)
8176
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8177
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8178
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8179
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8180
  else
8181
    return x;
8182
 
8183
  /* Form the new inner operation, seeing if it simplifies first.  */
8184
  tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8185
 
8186
  /* There is one exception to the general way of distributing:
8187
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
8188
  if (code == XOR && inner_code == IOR)
8189
    {
8190
      inner_code = AND;
8191
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8192
    }
8193
 
8194
  /* We may be able to continue distributing the result, so call
8195
     ourselves recursively on the inner operation before forming the
8196
     outer operation, which we return.  */
8197
  return simplify_gen_binary (inner_code, GET_MODE (x),
8198
                              apply_distributive_law (tem), other);
8199
}
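/* Worked example (not in the original source), assuming pseudos A, B
   and C in SImode:

     (ior (and A C) (and B C))  ==>  (and (ior A B) C)

   and, for the XOR-of-IOR exception handled just above,

     (xor (ior A C) (ior B C))  ==>  (and (xor A B) (not C))

   which holds bitwise: where C is 1 both sides are 0, and where C is
   0 both sides reduce to A ^ B.  */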
8200
 
8201
/* See if X is of the form (* (+ A B) C), and if so convert to
8202
   (+ (* A C) (* B C)) and try to simplify.
8203
 
8204
   Most of the time, this results in no change.  However, if some of
8205
   the operands are the same or inverses of each other, simplifications
8206
   will result.
8207
 
8208
   For example, (and (ior A B) (not B)) can occur as the result of
8209
   expanding a bit field assignment.  When we apply the distributive
8210
   law to this, we get (ior (and A (not B)) (and B (not B))),
8211
   which then simplifies to (and A (not B)).
8212
 
8213
   Note that no checks happen on the validity of applying the inverse
8214
   distributive law.  This is pointless since we can do it in the
8215
   few places where this routine is called.
8216
 
8217
   N is the index of the term that is decomposed (the arithmetic operation,
8218
   i.e. (+ A B) in the first example above).  !N is the index of the term that
8219
   is distributed, i.e. of C in the first example above.  */
8220
static rtx
8221
distribute_and_simplify_rtx (rtx x, int n)
8222
{
8223
  enum machine_mode mode;
8224
  enum rtx_code outer_code, inner_code;
8225
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8226
 
8227
  decomposed = XEXP (x, n);
8228
  if (!ARITHMETIC_P (decomposed))
8229
    return NULL_RTX;
8230
 
8231
  mode = GET_MODE (x);
8232
  outer_code = GET_CODE (x);
8233
  distributed = XEXP (x, !n);
8234
 
8235
  inner_code = GET_CODE (decomposed);
8236
  inner_op0 = XEXP (decomposed, 0);
8237
  inner_op1 = XEXP (decomposed, 1);
8238
 
8239
  /* Special case (and (xor B C) (not A)), which is equivalent to
8240
     (xor (ior A B) (ior A C))  */
8241
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8242
    {
8243
      distributed = XEXP (distributed, 0);
8244
      outer_code = IOR;
8245
    }
8246
 
8247
  if (n == 0)
8248
    {
8249
      /* Distribute the second term.  */
8250
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8251
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8252
    }
8253
  else
8254
    {
8255
      /* Distribute the first term.  */
8256
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8257
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8258
    }
8259
 
8260
  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8261
                                                     new_op0, new_op1));
8262
  if (GET_CODE (tmp) != outer_code
8263
      && rtx_cost (tmp, SET) < rtx_cost (x, SET))
8264
    return tmp;
8265
 
8266
  return NULL_RTX;
8267
}
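/* Notes (not in the original source) on the special case above:
   (and (xor B C) (not A)) and (xor (ior A B) (ior A C)) are bitwise
   equal: where A is 1 both sides are 0; where A is 0 both reduce to
   B ^ C.  Rewriting DISTRIBUTED to A and OUTER_CODE to IOR lets the
   XOR be decomposed as if A distributed over it.  The result is kept
   only when its top-level code differs from the original OUTER_CODE
   and rtx_cost says it is strictly cheaper.  */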
8268
 
8269
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8270
   in MODE.
8271
 
8272
   Return an equivalent form, if different from X.  Otherwise, return X.  If
8273
   X is zero, we are to always construct the equivalent form.  */
8274
 
8275
static rtx
8276
simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8277
                        unsigned HOST_WIDE_INT constop)
8278
{
8279
  unsigned HOST_WIDE_INT nonzero;
8280
  int i;
8281
 
8282
  /* Simplify VAROP knowing that we will be only looking at some of the
8283
     bits in it.
8284
 
8285
     Note by passing in CONSTOP, we guarantee that the bits not set in
8286
     CONSTOP are not significant and will never be examined.  We must
8287
     ensure that is the case by explicitly masking out those bits
8288
     before returning.  */
8289
  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
8290
 
8291
  /* If VAROP is a CLOBBER, we will fail so return it.  */
8292
  if (GET_CODE (varop) == CLOBBER)
8293
    return varop;
8294
 
8295
  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8296
     to VAROP and return the new constant.  */
8297
  if (GET_CODE (varop) == CONST_INT)
8298
    return gen_int_mode (INTVAL (varop) & constop, mode);
8299
 
8300
  /* See what bits may be nonzero in VAROP.  Unlike the general case of
8301
     a call to nonzero_bits, here we don't care about bits outside
8302
     MODE.  */
8303
 
8304
  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8305
 
8306
  /* Turn off all bits in the constant that are known to already be zero.
8307
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8308
     which is tested below.  */
8309
 
8310
  constop &= nonzero;
8311
 
8312
  /* If we don't have any bits left, return zero.  */
8313
  if (constop == 0)
8314
    return const0_rtx;
8315
 
8316
  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8317
     a power of two, we can replace this with an ASHIFT.  */
8318
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8319
      && (i = exact_log2 (constop)) >= 0)
8320
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8321
 
8322
  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8323
     or XOR, then try to apply the distributive law.  This may eliminate
8324
     operations if either branch can be simplified because of the AND.
8325
     It may also make some cases more complex, but those cases probably
8326
     won't match a pattern either with or without this.  */
8327
 
8328
  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8329
    return
8330
      gen_lowpart
8331
        (mode,
8332
         apply_distributive_law
8333
         (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8334
                               simplify_and_const_int (NULL_RTX,
8335
                                                       GET_MODE (varop),
8336
                                                       XEXP (varop, 0),
8337
                                                       constop),
8338
                               simplify_and_const_int (NULL_RTX,
8339
                                                       GET_MODE (varop),
8340
                                                       XEXP (varop, 1),
8341
                                                       constop))));
8342
 
8343
  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8344
     the AND and see if one of the operands simplifies to zero.  If so, we
8345
     may eliminate it.  */
8346
 
8347
  if (GET_CODE (varop) == PLUS
8348
      && exact_log2 (constop + 1) >= 0)
8349
    {
8350
      rtx o0, o1;
8351
 
8352
      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8353
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8354
      if (o0 == const0_rtx)
8355
        return o1;
8356
      if (o1 == const0_rtx)
8357
        return o0;
8358
    }
8359
 
8360
  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
8361
     if we already had one (just check for the simplest cases).  */
8362
  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8363
      && GET_MODE (XEXP (x, 0)) == mode
8364
      && SUBREG_REG (XEXP (x, 0)) == varop)
8365
    varop = XEXP (x, 0);
8366
  else
8367
    varop = gen_lowpart (mode, varop);
8368
 
8369
  /* If we can't make the SUBREG, try to return what we were given.  */
8370
  if (GET_CODE (varop) == CLOBBER)
8371
    return x ? x : varop;
8372
 
8373
  /* If we are only masking insignificant bits, return VAROP.  */
8374
  if (constop == nonzero)
8375
    x = varop;
8376
  else
8377
    {
8378
      /* Otherwise, return an AND.  */
8379
      constop = trunc_int_for_mode (constop, mode);
8380
      /* See how much, if any, of X we can use.  */
8381
      if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
8382
        x = simplify_gen_binary (AND, mode, varop, GEN_INT (constop));
8383
 
8384
      else
8385
        {
8386
          if (GET_CODE (XEXP (x, 1)) != CONST_INT
8387
              || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
8388
            SUBST (XEXP (x, 1), GEN_INT (constop));
8389
 
8390
          SUBST (XEXP (x, 0), varop);
8391
        }
8392
    }
8393
 
8394
  return x;
8395
}
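/* Worked examples (not part of the original file), assuming SImode:

     - VAROP = (neg X) with X known to be 0 or 1, CONSTOP = 4:
         exact_log2 (4) == 2, and since (neg X) is 0 or -1,
         (and (neg X) 4) is rewritten as (ashift X 2).
     - VAROP with nonzero_bits == 0x0f and CONSTOP = 0xfc:
         CONSTOP &= nonzero leaves 0x0c, so the returned AND uses the
         smaller constant (const_int 12).  */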
8396
 
8397
/* Given a REG, X, compute which bits in X can be nonzero.
8398
   We don't care about bits outside of those defined in MODE.
8399
 
8400
   For most X this is simply GET_MODE_MASK (MODE), but if X is
8401
   a shift, AND, or zero_extract, we can do better.  */
8402
 
8403
static rtx
8404
reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
8405
                              rtx known_x ATTRIBUTE_UNUSED,
8406
                              enum machine_mode known_mode ATTRIBUTE_UNUSED,
8407
                              unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
8408
                              unsigned HOST_WIDE_INT *nonzero)
8409
{
8410
  rtx tem;
8411
 
8412
  /* If X is a register whose nonzero bits value is current, use it.
8413
     Otherwise, if X is a register whose value we can find, use that
8414
     value.  Otherwise, use the previously-computed global nonzero bits
8415
     for this register.  */
8416
 
8417
  if (reg_stat[REGNO (x)].last_set_value != 0
8418
      && (reg_stat[REGNO (x)].last_set_mode == mode
8419
          || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT
8420
              && GET_MODE_CLASS (mode) == MODE_INT))
8421
      && (reg_stat[REGNO (x)].last_set_label == label_tick
8422
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8423
              && REG_N_SETS (REGNO (x)) == 1
8424
              && ! REGNO_REG_SET_P
8425
                 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8426
                  REGNO (x))))
8427
      && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8428
    {
8429
      *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits;
8430
      return NULL;
8431
    }
8432
 
8433
  tem = get_last_value (x);
8434
 
8435
  if (tem)
8436
    {
8437
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
8438
      /* If X is narrower than MODE and TEM is a non-negative
8439
         constant that would appear negative in the mode of X,
8440
         sign-extend it for use in reg_nonzero_bits because some
8441
         machines (maybe most) will actually do the sign-extension
8442
         and this is the conservative approach.
8443
 
8444
         ??? For 2.5, try to tighten up the MD files in this regard
8445
         instead of this kludge.  */
8446
 
8447
      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
8448
          && GET_CODE (tem) == CONST_INT
8449
          && INTVAL (tem) > 0
8450
          && 0 != (INTVAL (tem)
8451
                   & ((HOST_WIDE_INT) 1
8452
                      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
8453
        tem = GEN_INT (INTVAL (tem)
8454
                       | ((HOST_WIDE_INT) (-1)
8455
                          << GET_MODE_BITSIZE (GET_MODE (x))));
8456
#endif
8457
      return tem;
8458
    }
8459
  else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits)
8460
    {
8461
      unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits;
8462
 
8463
      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
8464
        /* We don't know anything about the upper bits.  */
8465
        mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
8466
      *nonzero &= mask;
8467
    }
8468
 
8469
  return NULL;
8470
}
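/* Hypothetical example (not in the original source) of the widening
   fixup above: if X is a QImode register with recorded nonzero_bits
   0x0f and MODE is SImode, nothing is known about bits 8..31, so the
   mask becomes 0x0f | (0xffffffff ^ 0xff) == 0xffffff0f before being
   ANDed into *NONZERO.  */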
8471
 
8472
/* Return the number of bits at the high-order end of X that are known to
8473
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
8474
   VOIDmode, X will be used in its own mode.  The returned value will always
8475
   be between 1 and the number of bits in MODE.  */
8476
 
8477
static rtx
8478
reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
8479
                                     rtx known_x ATTRIBUTE_UNUSED,
8480
                                     enum machine_mode known_mode
8481
                                     ATTRIBUTE_UNUSED,
8482
                                     unsigned int known_ret ATTRIBUTE_UNUSED,
8483
                                     unsigned int *result)
8484
{
8485
  rtx tem;
8486
 
8487
  if (reg_stat[REGNO (x)].last_set_value != 0
8488
      && reg_stat[REGNO (x)].last_set_mode == mode
8489
      && (reg_stat[REGNO (x)].last_set_label == label_tick
8490
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8491
              && REG_N_SETS (REGNO (x)) == 1
8492
              && ! REGNO_REG_SET_P
8493
                 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
8494
                  REGNO (x))))
8495
      && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
8496
    {
8497
      *result = reg_stat[REGNO (x)].last_set_sign_bit_copies;
8498
      return NULL;
8499
    }
8500
 
8501
  tem = get_last_value (x);
8502
  if (tem != 0)
8503
    return tem;
8504
 
8505
  if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0
8506
      && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
8507
    *result = reg_stat[REGNO (x)].sign_bit_copies;
8508
 
8509
  return NULL;
8510
}
8511
 
8512
/* Return the number of "extended" bits there are in X, when interpreted
8513
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
8514
   unsigned quantities, this is the number of high-order zero bits.
8515
   For signed quantities, this is the number of copies of the sign bit
8516
   minus 1.  In both cases, this function returns the number of "spare"
8517
   bits.  For example, if two quantities for which this function returns
8518
   at least 1 are added, the addition is known not to overflow.
8519
 
8520
   This function will always return 0 unless called during combine, which
8521
   implies that it must be called from a define_split.  */
8522
 
8523
unsigned int
8524
extended_count (rtx x, enum machine_mode mode, int unsignedp)
8525
{
8526
  if (nonzero_sign_valid == 0)
8527
    return 0;
8528
 
8529
  return (unsignedp
8530
          ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8531
             ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
8532
                               - floor_log2 (nonzero_bits (x, mode)))
8533
             : 0)
8534
          : num_sign_bit_copies (x, mode) - 1);
8535
}
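/* Worked example (not part of the original file): for an unsigned
   SImode quantity X with nonzero_bits (X, SImode) == 0xff,
   floor_log2 (0xff) == 7, so extended_count returns 31 - 7 == 24
   spare high-order zero bits; two such values can be added without
   overflow, as the comment above explains.  */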
8536
 
8537
/* This function is called from `simplify_shift_const' to merge two
8538
   outer operations.  Specifically, we have already found that we need
8539
   to perform operation *POP0 with constant *PCONST0 at the outermost
8540
   position.  We would now like to also perform OP1 with constant CONST1
8541
   (with *POP0 being done last).
8542
 
8543
   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8544
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
8545
   complement the innermost operand, otherwise it is unchanged.
8546
 
8547
   MODE is the mode in which the operation will be done.  No bits outside
8548
   the width of this mode matter.  It is assumed that the width of this mode
8549
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8550
 
8551
   If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only NEG, PLUS,
8552
   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
8553
   result is simply *PCONST0.
8554
 
8555
   If the resulting operation cannot be expressed as one operation, we
8556
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
8557
 
8558
static int
8559
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
8560
{
8561
  enum rtx_code op0 = *pop0;
8562
  HOST_WIDE_INT const0 = *pconst0;
8563
 
8564
  const0 &= GET_MODE_MASK (mode);
8565
  const1 &= GET_MODE_MASK (mode);
8566
 
8567
  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
8568
  if (op0 == AND)
8569
    const1 &= const0;
8570
 
8571
  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
8572
     if OP0 is SET.  */
8573
 
8574
  if (op1 == UNKNOWN || op0 == SET)
8575
    return 1;
8576
 
8577
  else if (op0 == UNKNOWN)
8578
    op0 = op1, const0 = const1;
8579
 
8580
  else if (op0 == op1)
8581
    {
8582
      switch (op0)
8583
        {
8584
        case AND:
8585
          const0 &= const1;
8586
          break;
8587
        case IOR:
8588
          const0 |= const1;
8589
          break;
8590
        case XOR:
8591
          const0 ^= const1;
8592
          break;
8593
        case PLUS:
8594
          const0 += const1;
8595
          break;
8596
        case NEG:
8597
          op0 = UNKNOWN;
8598
          break;
8599
        default:
8600
          break;
8601
        }
8602
    }
8603
 
8604
  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
8605
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8606
    return 0;
8607
 
8608
  /* If the two constants aren't the same, we can't do anything.  The
8609
     remaining six cases can all be done.  */
8610
  else if (const0 != const1)
8611
    return 0;
8612
 
8613
  else
8614
    switch (op0)
8615
      {
8616
      case IOR:
8617
        if (op1 == AND)
8618
          /* (a & b) | b == b */
8619
          op0 = SET;
8620
        else /* op1 == XOR */
8621
          /* (a ^ b) | b == a | b */
8622
          {;}
8623
        break;
8624
 
8625
      case XOR:
8626
        if (op1 == AND)
8627
          /* (a & b) ^ b == (~a) & b */
8628
          op0 = AND, *pcomp_p = 1;
8629
        else /* op1 == IOR */
8630
          /* (a | b) ^ b == a & ~b */
8631
          op0 = AND, const0 = ~const0;
8632
        break;
8633
 
8634
      case AND:
8635
        if (op1 == IOR)
8636
          /* (a | b) & b == b */
8637
          op0 = SET;
8638
        else /* op1 == XOR */
8639
          /* (a ^ b) & b == (~a) & b */
8640
          *pcomp_p = 1;
8641
        break;
8642
      default:
8643
        break;
8644
      }
8645
 
8646
  /* Check for NO-OP cases.  */
8647
  const0 &= GET_MODE_MASK (mode);
8648
  if (const0 == 0
8649
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
8650
    op0 = UNKNOWN;
8651
  else if (const0 == 0 && op0 == AND)
8652
    op0 = SET;
8653
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8654
           && op0 == AND)
8655
    op0 = UNKNOWN;
8656
 
8657
  /* ??? Slightly redundant with the above mask, but not entirely.
8658
     Moving this above means we'd have to sign-extend the mode mask
8659
     for the final test.  */
8660
  const0 = trunc_int_for_mode (const0, mode);
8661
 
8662
  *pop0 = op0;
8663
  *pconst0 = const0;
8664
 
8665
  return 1;
8666
}
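/* A minimal sketch (not in the original source) of the bitwise
   identities used in the switch above, stated over plain integers a
   and b; each can be checked exhaustively for small widths:

     ((a & b) | b) == b                 IOR of AND  ->  SET
     ((a ^ b) | b) == (a | b)           IOR of XOR  ->  unchanged
     ((a & b) ^ b) == (~a & b)          XOR of AND  ->  AND, complemented
     ((a | b) ^ b) == (a & ~b)          XOR of IOR  ->  AND with ~CONST0
     ((a | b) & b) == b                 AND of IOR  ->  SET
     ((a ^ b) & b) == (~a & b)          AND of XOR  ->  complemented  */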
8667
 
8668
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
8669
   The result of the shift is RESULT_MODE.  X, if nonzero, is an expression
8670
   that we started with.
8671
 
8672
   The shift is normally computed in the widest mode we find in VAROP, as
8673
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
8674
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
8675
 
8676
static rtx
8677
simplify_shift_const (rtx x, enum rtx_code code,
8678
                      enum machine_mode result_mode, rtx varop,
8679
                      int orig_count)
8680
{
8681
  enum rtx_code orig_code = code;
8682
  unsigned int count;
8683
  int signed_count;
8684
  enum machine_mode mode = result_mode;
8685
  enum machine_mode shift_mode, tmode;
8686
  unsigned int mode_words
8687
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8688
  /* We form (outer_op (code varop count) (outer_const)).  */
8689
  enum rtx_code outer_op = UNKNOWN;
8690
  HOST_WIDE_INT outer_const = 0;
8691
  rtx const_rtx;
8692
  int complement_p = 0;
8693
  rtx new;
8694
 
8695
  /* Make sure to truncate the "natural" shift on the way in.  We don't
8696
     want to do this inside the loop as it makes it more difficult to
8697
     combine shifts.  */
8698
  if (SHIFT_COUNT_TRUNCATED)
8699
    orig_count &= GET_MODE_BITSIZE (mode) - 1;
8700
 
8701
  /* If we were given an invalid count, don't do anything except exactly
8702
     what was requested.  */
8703
 
8704
  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
8705
    {
8706
      if (x)
8707
        return x;
8708
 
8709
      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count));
8710
    }
8711
 
8712
  count = orig_count;
8713
 
8714
  /* Unless one of the branches of the `if' in this loop does a `continue',
8715
     we will `break' the loop after the `if'.  */
8716
 
8717
  while (count != 0)
8718
    {
8719
      /* If we have an operand of (clobber (const_int 0)), just return that
8720
         value.  */
8721
      if (GET_CODE (varop) == CLOBBER)
8722
        return varop;
8723
 
8724
      /* If we discovered we had to complement VAROP, leave.  Making a NOT
8725
         here would cause an infinite loop.  */
8726
      if (complement_p)
8727
        break;
8728
 
8729
      /* Convert ROTATERT to ROTATE.  */
8730
      if (code == ROTATERT)
8731
        {
8732
          unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
8733
          code = ROTATE;
8734
          if (VECTOR_MODE_P (result_mode))
8735
            count = bitsize / GET_MODE_NUNITS (result_mode) - count;
8736
          else
8737
            count = bitsize - count;
8738
        }
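      /* For example (not in the original source): in SImode,
         (rotatert X 3) becomes (rotate X 29), since rotating right by
         COUNT equals rotating left by BITSIZE - COUNT.  */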
8739
 
8740
      /* We need to determine what mode we will do the shift in.  If the
8741
         shift is a right shift or a ROTATE, we must always do it in the mode
8742
         it was originally done in.  Otherwise, we can do it in MODE, the
8743
         widest mode encountered.  */
8744
      shift_mode
8745
        = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8746
           ? result_mode : mode);
8747
 
8748
      /* Handle cases where the count is greater than the size of the mode
8749
         minus 1.  For ASHIFT, use the size minus one as the count (this can
8750
         occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
8751
         take the count modulo the size.  For other shifts, the result is
8752
         zero.
8753
 
8754
         Since these shifts are being produced by the compiler by combining
8755
         multiple operations, each of which are defined, we know what the
8756
         result is supposed to be.  */
8757
 
8758
      if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1))
8759
        {
8760
          if (code == ASHIFTRT)
8761
            count = GET_MODE_BITSIZE (shift_mode) - 1;
8762
          else if (code == ROTATE || code == ROTATERT)
8763
            count %= GET_MODE_BITSIZE (shift_mode);
8764
          else
8765
            {
8766
              /* We can't simply return zero because there may be an
8767
                 outer op.  */
8768
              varop = const0_rtx;
8769
              count = 0;
8770
              break;
8771
            }
8772
        }
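      /* For instance (not part of the original file), with SImode
         VAROP: (lshiftrt X 40) has every bit shifted out and VAROP
         becomes 0; (ashiftrt X 40) is treated as a shift by 31, which
         replicates the sign bit; (rotate X 40) becomes (rotate X 8).  */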
8773
 
8774
      /* An arithmetic right shift of a quantity known to be -1 or 0
8775
         is a no-op.  */
8776
      if (code == ASHIFTRT
8777
          && (num_sign_bit_copies (varop, shift_mode)
8778
              == GET_MODE_BITSIZE (shift_mode)))
8779
        {
8780
          count = 0;
8781
          break;
8782
        }
8783
 
8784
      /* If we are doing an arithmetic right shift and discarding all but
8785
         the sign bit copies, this is equivalent to doing a shift by the
8786
         bitsize minus one.  Convert it into that shift because it will often
8787
         allow other simplifications.  */
8788
 
8789
      if (code == ASHIFTRT
8790
          && (count + num_sign_bit_copies (varop, shift_mode)
8791
              >= GET_MODE_BITSIZE (shift_mode)))
8792
        count = GET_MODE_BITSIZE (shift_mode) - 1;
8793
 
8794
      /* We simplify the tests below and elsewhere by converting
8795
         ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8796
         `make_compound_operation' will convert it to an ASHIFTRT for
8797
         those machines (such as VAX) that don't have an LSHIFTRT.  */
8798
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8799
          && code == ASHIFTRT
8800
          && ((nonzero_bits (varop, shift_mode)
8801
               & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8802
              == 0))
8803
        code = LSHIFTRT;
8804
 
8805
      if (code == LSHIFTRT
8806
          && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8807
          && !(nonzero_bits (varop, shift_mode) >> count))
8808
        varop = const0_rtx;
8809
      if (code == ASHIFT
8810
          && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8811
          && !((nonzero_bits (varop, shift_mode) << count)
8812
               & GET_MODE_MASK (shift_mode)))
8813
        varop = const0_rtx;
8814
 
8815
      switch (GET_CODE (varop))
8816
        {
8817
        case SIGN_EXTEND:
8818
        case ZERO_EXTEND:
8819
        case SIGN_EXTRACT:
8820
        case ZERO_EXTRACT:
8821
          new = expand_compound_operation (varop);
8822
          if (new != varop)
8823
            {
8824
              varop = new;
8825
              continue;
8826
            }
8827
          break;
8828
 
8829
        case MEM:
8830
          /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8831
             minus the width of a smaller mode, we can do this with a
8832
             SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
8833
          if ((code == ASHIFTRT || code == LSHIFTRT)
8834
              && ! mode_dependent_address_p (XEXP (varop, 0))
8835
              && ! MEM_VOLATILE_P (varop)
8836
              && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8837
                                         MODE_INT, 1)) != BLKmode)
8838
            {
8839
              new = adjust_address_nv (varop, tmode,
8840
                                       BYTES_BIG_ENDIAN ? 0
8841
                                       : count / BITS_PER_UNIT);
8842
 
8843
              varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8844
                                     : ZERO_EXTEND, mode, new);
8845
              count = 0;
8846
              continue;
8847
            }
8848
          break;
8849
 
8850
        case USE:
8851
          /* Similar to the case above, except that we can only do this if
8852
             the resulting mode is the same as that of the underlying
8853
             MEM and adjust the address depending on the *bits* endianness
8854
             because of the way that bit-field extract insns are defined.  */
8855
          if ((code == ASHIFTRT || code == LSHIFTRT)
8856
              && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8857
                                         MODE_INT, 1)) != BLKmode
8858
              && tmode == GET_MODE (XEXP (varop, 0)))
8859
            {
8860
              if (BITS_BIG_ENDIAN)
8861
                new = XEXP (varop, 0);
8862
              else
8863
                {
8864
                  new = copy_rtx (XEXP (varop, 0));
8865
                  SUBST (XEXP (new, 0),
8866
                         plus_constant (XEXP (new, 0),
8867
                                        count / BITS_PER_UNIT));
8868
                }
8869
 
8870
              varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
8871
                                     : ZERO_EXTEND, mode, new);
8872
              count = 0;
8873
              continue;
8874
            }
8875
          break;
8876
 
8877
        case SUBREG:
8878
          /* If VAROP is a SUBREG, strip it as long as the inner operand has
8879
             the same number of words as what we've seen so far.  Then store
8880
             the widest mode in MODE.  */
8881
          if (subreg_lowpart_p (varop)
8882
              && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8883
                  > GET_MODE_SIZE (GET_MODE (varop)))
8884
              && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8885
                                  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8886
                 == mode_words)
8887
            {
8888
              varop = SUBREG_REG (varop);
8889
              if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8890
                mode = GET_MODE (varop);
8891
              continue;
8892
            }
8893
          break;
8894
 
8895
        case MULT:
8896
          /* Some machines use MULT instead of ASHIFT because MULT
8897
             is cheaper.  But it is still better on those machines to
8898
             merge two shifts into one.  */
8899
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8900
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8901
            {
8902
              varop
8903
                = simplify_gen_binary (ASHIFT, GET_MODE (varop),
8904
                                       XEXP (varop, 0),
8905
                                       GEN_INT (exact_log2 (
8906
                                                INTVAL (XEXP (varop, 1)))));
8907
              continue;
8908
            }
8909
          break;
8910
 
8911
        case UDIV:
8912
          /* Similar, for when divides are cheaper.  */
8913
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8914
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8915
            {
8916
              varop
8917
                = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
8918
                                       XEXP (varop, 0),
8919
                                       GEN_INT (exact_log2 (
8920
                                                INTVAL (XEXP (varop, 1)))));
8921
              continue;
8922
            }
8923
          break;
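          /* Hypothetical examples (not in the original source) of the
             MULT and UDIV rewrites above, in SImode:

               (lshiftrt (mult X 8) 2)  -- MULT becomes (ashift X 3)
               (lshiftrt (udiv X 4) 1)  -- UDIV becomes (lshiftrt X 2)

             so the loop can then merge the two shift counts.  */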
8924
 
8925
        case ASHIFTRT:
8926
          /* If we are extracting just the sign bit of an arithmetic
8927
             right shift, that shift is not needed.  However, the sign
8928
             bit of a wider mode may be different from what would be
8929
             interpreted as the sign bit in a narrower mode, so, if
8930
             the result is narrower, don't discard the shift.  */
8931
          if (code == LSHIFTRT
8932
              && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
8933
              && (GET_MODE_BITSIZE (result_mode)
8934
                  >= GET_MODE_BITSIZE (GET_MODE (varop))))
8935
            {
8936
              varop = XEXP (varop, 0);
8937
              continue;
8938
            }
8939
 
8940
          /* ... fall through ...  */
8941
 
8942
        case LSHIFTRT:
8943
        case ASHIFT:
8944
        case ROTATE:
8945
          /* Here we have two nested shifts.  The result is usually the
8946
             AND of a new shift with a mask.  We compute the result below.  */
8947
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8948
              && INTVAL (XEXP (varop, 1)) >= 0
8949
              && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8950
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8951
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8952
              && !VECTOR_MODE_P (result_mode))
8953
            {
8954
              enum rtx_code first_code = GET_CODE (varop);
8955
              unsigned int first_count = INTVAL (XEXP (varop, 1));
8956
              unsigned HOST_WIDE_INT mask;
8957
              rtx mask_rtx;
8958
 
8959
              /* We have one common special case.  We can't do any merging if
8960
                 the inner code is an ASHIFTRT of a smaller mode.  However, if
8961
                 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8962
                 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8963
                 we can convert it to
8964
                 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8965
                 This simplifies certain SIGN_EXTEND operations.  */
8966
              if (code == ASHIFT && first_code == ASHIFTRT
8967
                  && count == (unsigned int)
8968
                              (GET_MODE_BITSIZE (result_mode)
8969
                               - GET_MODE_BITSIZE (GET_MODE (varop))))
8970
                {
8971
                  /* C3 has the low-order C1 bits zero.  */
8972
 
8973
                  mask = (GET_MODE_MASK (mode)
8974
                          & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
8975
 
8976
                  varop = simplify_and_const_int (NULL_RTX, result_mode,
8977
                                                  XEXP (varop, 0), mask);
8978
                  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8979
                                                varop, count);
                  count = first_count;
                  code = ASHIFTRT;
                  continue;
                }

              /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
                 than C1 high-order bits equal to the sign bit, we can convert
                 this to either an ASHIFT or an ASHIFTRT depending on the
                 two counts.

                 We cannot do this if VAROP's mode is not SHIFT_MODE.  */

              if (code == ASHIFTRT && first_code == ASHIFT
                  && GET_MODE (varop) == shift_mode
                  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
                      > first_count))
                {
                  varop = XEXP (varop, 0);

                  signed_count = count - first_count;
                  if (signed_count < 0)
                    count = -signed_count, code = ASHIFT;
                  else
                    count = signed_count;

                  continue;
                }

              /* There are some cases we can't do.  If CODE is ASHIFTRT,
                 we can only do this if FIRST_CODE is also ASHIFTRT.

                 We can't do the case when CODE is ROTATE and FIRST_CODE is
                 ASHIFTRT.

                 If the mode of this shift is not the mode of the outer shift,
                 we can't do this if either shift is a right shift or ROTATE.

                 Finally, we can't do any of these if the mode is too wide
                 unless the codes are the same.

                 Handle the case where the shift codes are the same
                 first.  */

              if (code == first_code)
                {
                  if (GET_MODE (varop) != result_mode
                      && (code == ASHIFTRT || code == LSHIFTRT
                          || code == ROTATE))
                    break;

                  count += first_count;
                  varop = XEXP (varop, 0);
                  continue;
                }

              if (code == ASHIFTRT
                  || (code == ROTATE && first_code == ASHIFTRT)
                  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
                  || (GET_MODE (varop) != result_mode
                      && (first_code == ASHIFTRT || first_code == LSHIFTRT
                          || first_code == ROTATE
                          || code == ROTATE)))
                break;

              /* To compute the mask to apply after the shift, shift the
                 nonzero bits of the inner shift the same way the
                 outer shift will.  */

              mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));

              mask_rtx
                = simplify_binary_operation (code, result_mode, mask_rtx,
                                             GEN_INT (count));

              /* Give up if we can't compute an outer operation to use.  */
              if (mask_rtx == 0
                  || GET_CODE (mask_rtx) != CONST_INT
                  || ! merge_outer_ops (&outer_op, &outer_const, AND,
                                        INTVAL (mask_rtx),
                                        result_mode, &complement_p))
                break;

              /* If the shifts are in the same direction, we add the
                 counts.  Otherwise, we subtract them.  */
              signed_count = count;
              if ((code == ASHIFTRT || code == LSHIFTRT)
                  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
                signed_count += first_count;
              else
                signed_count -= first_count;

              /* If COUNT is positive, the new shift is usually CODE,
                 except for the two exceptions below, in which case it is
                 FIRST_CODE.  If the count is negative, FIRST_CODE should
                 always be used.  */
              if (signed_count > 0
                  && ((first_code == ROTATE && code == ASHIFT)
                      || (first_code == ASHIFTRT && code == LSHIFTRT)))
                code = first_code, count = signed_count;
              else if (signed_count < 0)
                code = first_code, count = -signed_count;
              else
                count = signed_count;

              varop = XEXP (varop, 0);
              continue;
            }
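
          /* For example, the code above turns (lshiftrt (ashift X 8) 3)
             in SImode into (and (ashift X 5) (const_int 0x1fffffe0)):
             the mask 0xffffff00 shifted right by 3 gives the outer AND
             constant, and since the shifts go in opposite directions
             the counts are subtracted.  */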

          /* If we have (A << B << C) for any shift, we can convert this to
             (A << C << B).  This wins if A is a constant.  Only try this if
             B is not a constant.  */

          else if (GET_CODE (varop) == code
                   && GET_CODE (XEXP (varop, 1)) != CONST_INT
                   && 0 != (new
                            = simplify_binary_operation (code, mode,
                                                         XEXP (varop, 0),
                                                         GEN_INT (count))))
            {
              varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
              count = 0;
              continue;
            }
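
          /* For example, (ashift (ashift (const_int 3) B) 2) becomes
             (ashift (const_int 12) B): the constant shift folds away
             and only the shift by B remains.  */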
          break;

        case NOT:
          /* Make this fit the case below.  */
          varop = gen_rtx_XOR (mode, XEXP (varop, 0),
                               GEN_INT (GET_MODE_MASK (mode)));
          continue;

        case IOR:
        case AND:
        case XOR:
          /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have an (le X 0) operation.  If we have an arithmetic shift
             and STORE_FLAG_VALUE is 1 or we have a logical shift with
             STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */

          if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
              && XEXP (XEXP (varop, 0), 1) == constm1_rtx
              && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && (code == LSHIFTRT || code == ASHIFTRT)
              && count == (unsigned int)
                          (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
              varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
                                  const0_rtx);

              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (GET_MODE (varop), varop);

              continue;
            }
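
          /* For example, in SImode with STORE_FLAG_VALUE == 1,
             (lshiftrt (ior (plus X (const_int -1)) X) 31) becomes
             (le X 0): the sign bit of (X - 1) | X is set exactly when
             X is zero or negative.  */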

          /* If we have (shift (logical)), move the logical to the outside
             to allow it to possibly combine with another logical and the
             shift to combine with another shift.  This also canonicalizes to
             what a ZERO_EXTRACT looks like.  Also, some machines have
             (and (shift)) insns.  */

          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
              /* We can't do this if we have (ashiftrt (xor)) and the
                 constant has its sign bit set in shift_mode.  */
              && !(code == ASHIFTRT && GET_CODE (varop) == XOR
                   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
                                              shift_mode))
              && (new = simplify_binary_operation (code, result_mode,
                                                   XEXP (varop, 1),
                                                   GEN_INT (count))) != 0
              && GET_CODE (new) == CONST_INT
              && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
                                  INTVAL (new), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }
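
          /* For example, (lshiftrt (and X (const_int 0xff00)) 8)
             becomes (and (lshiftrt X 8) (const_int 0xff)), leaving the
             AND free to merge with an outer logical operation.  */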

          /* If we can't do that, try to simplify the shift in each arm of the
             logical expression, make a new logical expression, and apply
             the inverse distributive law.  This also can't be done
             for some (ashiftrt (xor)).  */
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
             && !(code == ASHIFTRT && GET_CODE (varop) == XOR
                  && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
                                             shift_mode)))
            {
              rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
                                              XEXP (varop, 0), count);
              rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
                                              XEXP (varop, 1), count);

              varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
                                           lhs, rhs);
              varop = apply_distributive_law (varop);

              count = 0;
              continue;
            }
          break;

        case EQ:
          /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
             says that the sign bit can be tested, FOO has mode MODE, C is
             GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
             that may be nonzero.  */
          if (code == LSHIFTRT
              && XEXP (varop, 1) == const0_rtx
              && GET_MODE (XEXP (varop, 0)) == result_mode
              && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
              && ((STORE_FLAG_VALUE
                   & ((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (result_mode) - 1))))
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  (HOST_WIDE_INT) 1, result_mode,
                                  &complement_p))
            {
              varop = XEXP (varop, 0);
              count = 0;
              continue;
            }
          break;

        case NEG:
          /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
             than the number of bits in the mode is equivalent to A.  */
          if (code == LSHIFTRT
              && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
            {
              varop = XEXP (varop, 0);
              count = 0;
              continue;
            }

          /* NEG commutes with ASHIFT since it is multiplication.  Move the
             NEG outside to allow shifts to combine.  */
          if (code == ASHIFT
              && merge_outer_ops (&outer_op, &outer_const, NEG,
                                  (HOST_WIDE_INT) 0, result_mode,
                                  &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }
          break;

        case PLUS:
          /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
             is one less than the number of bits in the mode is
             equivalent to (xor A 1).  */
          if (code == LSHIFTRT
              && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1)
              && XEXP (varop, 1) == constm1_rtx
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  (HOST_WIDE_INT) 1, result_mode,
                                  &complement_p))
            {
              count = 0;
              varop = XEXP (varop, 0);
              continue;
            }
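
          /* For example, if A is known to be 0 or 1 in SImode,
             (lshiftrt (plus A (const_int -1)) 31) yields 1 exactly
             when A is 0, i.e. (xor A 1).  */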

          /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
             that might be nonzero in BAR are those being shifted out and those
             bits are known zero in FOO, we can replace the PLUS with FOO.
             Similarly in the other operand order.  This code occurs when
             we are computing the size of a variable-size array.  */

          if ((code == ASHIFTRT || code == LSHIFTRT)
              && count < HOST_BITS_PER_WIDE_INT
              && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
              && (nonzero_bits (XEXP (varop, 1), result_mode)
                  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
            {
              varop = XEXP (varop, 0);
              continue;
            }
          else if ((code == ASHIFTRT || code == LSHIFTRT)
                   && count < HOST_BITS_PER_WIDE_INT
                   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
                            >> count)
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
                            & nonzero_bits (XEXP (varop, 1),
                                            result_mode)))
            {
              varop = XEXP (varop, 1);
              continue;
            }

          /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
          if (code == ASHIFT
              && GET_CODE (XEXP (varop, 1)) == CONST_INT
              && (new = simplify_binary_operation (ASHIFT, result_mode,
                                                   XEXP (varop, 1),
                                                   GEN_INT (count))) != 0
              && GET_CODE (new) == CONST_INT
              && merge_outer_ops (&outer_op, &outer_const, PLUS,
                                  INTVAL (new), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }
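
          /* For example, (ashift (plus foo (const_int 7)) 2) becomes
             (plus (ashift foo 2) (const_int 28)).  */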

          /* Check for 'PLUS signbit', which is the canonical form of 'XOR
             signbit', and attempt to change the PLUS to an XOR and move it to
             the outer operation as is done above in the AND/IOR/XOR case
             leg for shift(logical).  See details in the logical handling
             above for the reasoning in doing so.  */
          if (code == LSHIFTRT
              && GET_CODE (XEXP (varop, 1)) == CONST_INT
              && mode_signbit_p (result_mode, XEXP (varop, 1))
              && (new = simplify_binary_operation (code, result_mode,
                                                   XEXP (varop, 1),
                                                   GEN_INT (count))) != 0
              && GET_CODE (new) == CONST_INT
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  INTVAL (new), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          break;

        case MINUS:
          /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have a (gt X 0) operation.  If the shift is arithmetic with
             STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
             we have a (neg (gt X 0)) operation.  */

          if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
              && count == (unsigned int)
                          (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
              && (code == LSHIFTRT || code == ASHIFTRT)
              && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1))
                 == count
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
              varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
                                  const0_rtx);

              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (GET_MODE (varop), varop);

              continue;
            }
          break;

        case TRUNCATE:
          /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
             if the truncate does not affect the value.  */
          if (code == LSHIFTRT
              && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
              && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
              && (INTVAL (XEXP (XEXP (varop, 0), 1))
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
                      - GET_MODE_BITSIZE (GET_MODE (varop)))))
            {
              rtx varop_inner = XEXP (varop, 0);

              varop_inner
                = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
                                    XEXP (varop_inner, 0),
                                    GEN_INT
                                    (count + INTVAL (XEXP (varop_inner, 1))));
              varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
              count = 0;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* We need to determine what mode to do the shift in.  If the shift is
     a right shift or ROTATE, we must always do it in the mode it was
     originally done in.  Otherwise, we can do it in MODE, the widest mode
     encountered.  The code we care about is that of the shift that will
     actually be done, not the shift that was originally requested.  */
  shift_mode
    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
       ? result_mode : mode);

  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.

     If we were passed a value for X, see if we can use any pieces of
     it.  If not, make a new rtx.  */

  if (x && GET_RTX_CLASS (GET_CODE (x)) == RTX_BIN_ARITH
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count)
    const_rtx = XEXP (x, 1);
  else
    const_rtx = GEN_INT (count);

  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == shift_mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else if (GET_MODE (varop) != shift_mode)
    varop = gen_lowpart (shift_mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
  if (new != 0)
    x = new;
  else
    x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx);

  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != UNKNOWN && GET_CODE (x) == code
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
                              INTVAL (XEXP (x, 1)));

  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
                                GET_MODE_MASK (result_mode) >> orig_count);

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart (result_mode, x);

  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);

  if (outer_op != UNKNOWN)
    {
      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
        outer_const = trunc_int_for_mode (outer_const, result_mode);

      if (outer_op == AND)
        x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
        /* This means that we have determined that the result is
           equivalent to a constant.  This should be rare.  */
        x = GEN_INT (outer_const);
      else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
        x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
      else
        x = simplify_gen_binary (outer_op, result_mode, x,
                                 GEN_INT (outer_const));
    }

  return x;
}
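
/* An illustrative sketch, not part of combine.c: the count-merging rule
   used above, restated on plain 32-bit unsigned values in ISO C.  Shifts
   in the same direction add their counts; opposite directions subtract
   them, with an AND mask recording the bits that survive both shifts,
   which is what merge_outer_ops records for the RTL form.  Guarded out
   so it does not affect the surrounding file; compile it separately to
   check the identity.  */
#if 0
#include <assert.h>
#include <stdint.h>

/* Rewrite (x << c1) >> c2 (logical right shift, c1 >= c2) as a single
   shift plus mask, mirroring the ASHIFT/LSHIFTRT case of
   simplify_shift_const.  */
static uint32_t
merged_shifts (uint32_t x, unsigned int c1, unsigned int c2)
{
  uint32_t mask = (0xffffffffu << c1) >> c2;
  return (x << (c1 - c2)) & mask;
}

int
main (void)
{
  uint32_t x = 0x12345678u;
  assert (((x << 8) >> 3) == merged_shifts (x, 8, 3));
  assert (((x << 5) >> 5) == merged_shifts (x, 5, 5));
  return 0;
}
#endif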

/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
{
  rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes, old_pat;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
          && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
        return -1;

  old_pat = PATTERN (insn);
  old_notes = REG_NOTES (insn);
  PATTERN (insn) = pat;
  REG_NOTES (insn) = 0;

  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If the pattern isn't recognized, there is the possibility that we
     previously had an insn that clobbered some register as a side effect,
     but the combined insn doesn't need to do that.  So try once more
     without the clobbers unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
          {
            if (i != pos)
              SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
            pos++;
          }

      SUBST_INT (XVECLEN (pat, 0), pos);

      if (pos == 1)
        pat = XVECEXP (pat, 0, 0);

      PATTERN (insn) = pat;
      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }
  PATTERN (insn) = old_pat;
  REG_NOTES (insn) = old_notes;

  /* Recognize all no-op sets; these will be deleted by a follow-up pass.  */
  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
                                     rtvec_alloc (GET_CODE (pat) == PARALLEL
                                                  ? (XVECLEN (pat, 0)
                                                     + num_clobbers_to_add)
                                                  : num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
        for (i = 0; i < XVECLEN (pat, 0); i++)
          XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
        XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
           i < XVECLEN (newpat, 0); i++)
        {
          if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
              && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
            return -1;
          notes = gen_rtx_EXPR_LIST (REG_UNUSED,
                                     XEXP (XVECEXP (newpat, 0, i), 0), notes);
        }
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}

/* Like gen_lowpart_general but for use by combine.  In combine it
   is not possible to create any new pseudoregs.  However, it is
   safe to create invalid memory addresses, because combine will
   try to recognize them and all they will do is make the combine
   attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

static rtx
gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
  enum machine_mode imode = GET_MODE (x);
  unsigned int osize = GET_MODE_SIZE (omode);
  unsigned int isize = GET_MODE_SIZE (imode);
  rtx result;

  if (omode == imode)
    return x;

  /* Return identity if this is a CONST or symbolic reference.  */
  if (omode == Pmode
      && (GET_CODE (x) == CONST
          || GET_CODE (x) == SYMBOL_REF
          || GET_CODE (x) == LABEL_REF))
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */
  if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
      && ! ((imode == VOIDmode
             && (GET_CODE (x) == CONST_INT
                 || GET_CODE (x) == CONST_DOUBLE))
            || isize == osize))
    goto fail;

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    {
      x = SUBREG_REG (x);

      /* For use in case we fall down into the address adjustments
         further below, we need to adjust the recorded mode and size
         of X, namely IMODE and ISIZE, since we just changed X.  */
      imode = GET_MODE (x);

      if (imode == omode)
        return x;

      isize = GET_MODE_SIZE (imode);
    }

  result = gen_lowpart_common (omode, x);

#ifdef CANNOT_CHANGE_MODE_CLASS
  if (result != 0 && GET_CODE (result) == SUBREG)
    record_subregs_of_mode (result);
#endif

  if (result)
    return result;

  if (MEM_P (x))
    {
      int offset = 0;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
         address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
        goto fail;

      /* If we want to refer to something bigger than the original memref,
         generate a paradoxical subreg instead.  That will force a reload
         of the original memref X.  */
      if (isize < osize)
        return gen_rtx_SUBREG (omode, x, 0);

      if (WORDS_BIG_ENDIAN)
        offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);

      /* Adjust the address so that the address-after-the-data is
         unchanged.  */
      if (BYTES_BIG_ENDIAN)
        offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);

      return adjust_address_nv (x, omode, offset);
    }

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (COMPARISON_P (x))
    return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int offset = 0;
      rtx res;

      offset = subreg_lowpart_offset (omode, imode);
      if (imode == VOIDmode)
        {
          imode = int_mode_for_mode (omode);
          x = gen_lowpart_common (imode, x);
          if (x == NULL)
            goto fail;
        }
      res = simplify_gen_subreg (omode, x, imode, offset);
      if (res)
        return res;
    }

 fail:
  return gen_rtx_CLOBBER (imode, const0_rtx);
}
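
/* An illustrative sketch, not part of combine.c: the byte-offset
   arithmetic used in the MEM case above, in plain ISO C.  A lowpart
   access must leave the address-after-the-data unchanged, so on a
   big-endian target the start address moves forward by the size
   difference.  A 4-byte word is assumed.  Guarded out so it does not
   affect the surrounding file.  */
#if 0
#include <stdio.h>

#define UNITS_PER_WORD 4
#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

static int
lowpart_offset (int isize, int osize, int words_big_endian,
                int bytes_big_endian)
{
  int offset = 0;

  if (words_big_endian)
    offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
  if (bytes_big_endian)
    offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
  return offset;
}

int
main (void)
{
  /* The low byte of a 4-byte value sits at offset 3 on a fully
     big-endian target and at offset 0 on a little-endian one.  */
  printf ("%d\n", lowpart_offset (4, 1, 1, 1));
  printf ("%d\n", lowpart_offset (4, 1, 0, 0));
  return 0;
}
#endif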

/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;

  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
         so check specially.  */
      if (code != GTU && code != GEU && code != LTU && code != LEU
          && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
          && GET_CODE (XEXP (op0, 0)) == ASHIFT
          && GET_CODE (XEXP (op1, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
          && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
          && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
              == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && XEXP (op0, 1) == XEXP (op1, 1)
          && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
          && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
          && (INTVAL (XEXP (op0, 1))
              == (GET_MODE_BITSIZE (GET_MODE (op0))
                  - (GET_MODE_BITSIZE
                     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
        {
          op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
          op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
        }
#endif

      /* If both operands are the same constant shift, see if we can ignore the
         shift.  We can if the shift is a rotate or if the bits shifted out of
         this shift are known to be zero for both inputs and if the type of
         comparison is compatible with the shift.  */
      if (GET_CODE (op0) == GET_CODE (op1)
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
              || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
                  && (code != GT && code != LT && code != GE && code != LE))
              || (GET_CODE (op0) == ASHIFTRT
                  && (code != GTU && code != LTU
                      && code != GEU && code != LEU)))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) >= 0
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
          && XEXP (op0, 1) == XEXP (op1, 1))
        {
          enum machine_mode mode = GET_MODE (op0);
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
          int shift_count = INTVAL (XEXP (op0, 1));

          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
            mask &= (mask >> shift_count) << shift_count;
          else if (GET_CODE (op0) == ASHIFT)
            mask = (mask & (mask << shift_count)) >> shift_count;

          if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
              && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
          else
            break;
        }
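
      /* For example, (eq (ashift A 2) (ashift B 2)) in SImode gives
         MASK 0x3fffffff; if the two high bits of both A and B are
         known to be zero, the shifts are dropped and A is compared
         directly with B.  */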

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
         SUBREGs are of the same mode, and, in both cases, the AND would
         be redundant if the comparison was done in the narrower mode,
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
         and the operand's possibly nonzero bits are 0xffffff01; in that case
         if we only care about QImode, we don't need the AND).  This case
         occurs if the output mode of an scc insn is not SImode and
         STORE_FLAG_VALUE == 1 (e.g., the 386).

         Similarly, check for a case where the AND's are ZERO_EXTEND
         operations from some narrower mode even though a SUBREG is not
         present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
               && GET_CODE (XEXP (op0, 1)) == CONST_INT
               && GET_CODE (XEXP (op1, 1)) == CONST_INT)
        {
          rtx inner_op0 = XEXP (op0, 0);
          rtx inner_op1 = XEXP (op1, 0);
          HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
          int changed = 0;

          if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (inner_op0))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
              && (GET_MODE (SUBREG_REG (inner_op0))
                  == GET_MODE (SUBREG_REG (inner_op1)))
              && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
                                              GET_MODE (SUBREG_REG (inner_op0)))))
              && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
                                              GET_MODE (SUBREG_REG (inner_op1))))))
            {
              op0 = SUBREG_REG (inner_op0);
              op1 = SUBREG_REG (inner_op1);

              /* The resulting comparison is always unsigned since we masked
                 off the original sign bit.  */
              code = unsigned_condition (code);

              changed = 1;
            }

          else if (c0 == c1)
            for (tmode = GET_CLASS_NARROWEST_MODE
                 (GET_MODE_CLASS (GET_MODE (op0)));
                 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
              if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
                {
                  op0 = gen_lowpart (tmode, inner_op0);
                  op1 = gen_lowpart (tmode, inner_op1);
                  code = unsigned_condition (code);
                  changed = 1;
                  break;
                }

          if (! changed)
            break;
        }

      /* If both operands are NOT, we can strip off the outer operation
         and adjust the comparison code for swapped operands; similarly for
         NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
               || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
                   && (code == EQ || code == NE)))
        op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
        break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we only are concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */

  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      unsigned int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
         CCmode, and the floating-point modes.  An exception is that we
         can handle VOIDmode if OP0 is a COMPARE or a comparison
         operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
          && ! (mode == VOIDmode
                && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
        break;

      /* Get the constant we are comparing against and turn off all bits
         not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode != VOIDmode)
        const_op = trunc_int_for_mode (const_op, mode);
      op1 = GEN_INT (const_op);

      /* If we are comparing against a constant power of two and the value
         being compared can only have that single bit nonzero (e.g., it was
         `and'ed with that bit), we can replace this with a comparison
         with zero.  */
      if (const_op
          && (code == EQ || code == NE || code == GE || code == GEU
              || code == LT || code == LTU)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && exact_log2 (const_op) >= 0
          && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
        {
          code = (code == EQ || code == GE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }
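
      /* For example, if OP0 is (and X (const_int 4)), this turns
         (eq (and X 4) 4) into (ne (and X 4) 0).  */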

      /* Similarly, if we are comparing a value known to be either -1 or
         0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
          && (code == EQ || code == NE || code == GT || code == LE
              || code == GEU || code == LTU)
          && num_sign_bit_copies (op0, mode) == mode_width)
        {
          code = (code == EQ || code == LE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }

      /* Do some canonicalizations based on the comparison code.  We prefer
         comparisons against zero and then prefer equality comparisons.
         If we can reduce the size of a constant, we will do that too.  */

      switch (code)
        {
        case LT:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LE;
              /* ... fall through to LE case below.  */
            }
          else
            break;

        case LE:
          /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = LT;
            }

          /* If we are doing a <= 0 comparison on a value known to have
             a zero sign bit, we can replace this with == 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = EQ;
          break;

        case GE:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GT;
              /* ... fall through to GT below.  */
            }
          else
            break;

        case GT:
          /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = GE;
            }

          /* If we are doing a > 0 comparison on a value known to have
             a zero sign bit, we can replace this with != 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = NE;
          break;

        case LTU:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LEU;
              /* ... fall through ...  */
            }

          /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
              break;
            }
          else
            break;

        case LEU:
          /* unsigned <= 0 is equivalent to == 0.  */
          if (const_op == 0)
            code = EQ;

          /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
            }
          break;

        case GEU:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 1)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GTU;
              /* ... fall through ...  */
            }

          /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
              break;
            }
          else
            break;

        case GTU:
          /* unsigned > 0 is equivalent to != 0.  */
          if (const_op == 0)
            code = NE;

          /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
            }
          break;

        default:
          break;
        }

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
                               || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
         MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        op0 = force_to_mode (op0, mode,
                             ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (mode) - 1)),
                             NULL_RTX, 0);

      /* Now try cases based on the opcode of OP0.  If none of the cases
         does a "continue", we exit this loop immediately after the
         switch.  */

      switch (GET_CODE (op0))
        {
        case ZERO_EXTRACT:
          /* If we are extracting a single bit from a variable position in
             a constant that has only a single bit set and are comparing it
             with zero, we can convert this into an equality comparison
             between the position and the location of the single bit.  */
          /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
             have already reduced the shift count modulo the word size.  */
          if (!SHIFT_COUNT_TRUNCATED
              && GET_CODE (XEXP (op0, 0)) == CONST_INT
              && XEXP (op0, 1) == const1_rtx
              && equality_comparison_p && const_op == 0
              && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
            {
              if (BITS_BIG_ENDIAN)
                {
                  enum machine_mode new_mode
                    = mode_for_extraction (EP_extzv, 1);
                  if (new_mode == MAX_MACHINE_MODE)
                    i = BITS_PER_WORD - 1 - i;
                  else
                    {
                      mode = new_mode;
                      i = (GET_MODE_BITSIZE (mode) - 1 - i);
                    }
                }

              op0 = XEXP (op0, 2);
              op1 = GEN_INT (i);
              const_op = i;

              /* Result is nonzero iff shift count is equal to I.  */
              code = reverse_condition (code);
              continue;
            }
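
          /* For example, with little-endian bit numbering,
             (eq (zero_extract (const_int 4) (const_int 1) POS) 0)
             becomes (ne POS 2): bit 2 is the only bit set in 4, so
             the extracted bit is nonzero exactly when POS is 2.  */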

          /* ... fall through ...  */

        case SIGN_EXTRACT:
          tem = expand_compound_operation (op0);
          if (tem != op0)
            {
              op0 = tem;
              continue;
            }
          break;

        case NOT:
          /* If testing for equality, we can take the NOT of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If just looking at the sign bit, reverse the sense of the
             comparison.  */
          if (sign_bit_comparison_p)
            {
              op0 = XEXP (op0, 0);
              code = (code == GE ? LT : GE);
              continue;
            }
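
          /* For example, (lt (not X) 0) tests the sign bit of the
             complement, which is set exactly when the sign bit of X
             is clear, so it becomes (ge X 0).  */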
          break;

        case NEG:
          /* If testing for equality, we can take the NEG of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* The remaining cases only apply to comparisons with zero.  */
          if (const_op != 0)
            break;

          /* When X is ABS or is known positive,
             (neg X) is < 0 if and only if X != 0.  */

          if (sign_bit_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ABS
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
                      && (nonzero_bits (XEXP (op0, 0), mode)
                          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
            {
              op0 = XEXP (op0, 0);
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If we have NEG of something whose two high-order bits are the
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
          if (num_sign_bit_copies (op0, mode) >= 2)
            {
              op0 = XEXP (op0, 0);
              code = swap_condition (code);
              continue;
            }
          break;

        case ROTATE:
          /* If we are testing equality and our count is a constant, we
             can perform the inverse operation on our RHS.  */
          if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (tem = simplify_binary_operation (ROTATERT, mode,
                                                   op1, XEXP (op0, 1))) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
             a particular bit.  Convert it to an AND of a constant of that
             bit.  This will be converted into a ZERO_EXTRACT.  */
          if (const_op == 0 && sign_bit_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* Fall through.  */

        case ABS:
          /* ABS is ignorable inside an equality comparison with zero.  */
          if (const_op == 0 && equality_comparison_p)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SIGN_EXTEND:
          /* Can simplify (compare (zero/sign_extend FOO) CONST) to
             (compare FOO CONST) if CONST fits in FOO's mode and we
             are either testing inequality or have an unsigned
             comparison with ZERO_EXTEND or a signed comparison with
             SIGN_EXTEND.  But don't do it if we don't have a compare
             insn of the given mode, since we'd have to revert it
             later on, and then we wouldn't know whether to sign- or
             zero-extend.  */
          mode = GET_MODE (XEXP (op0, 0));
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
              && ! unsigned_comparison_p
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < (((unsigned HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (mode) - 1))))
              && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SUBREG:
          /* Check for the case where we are comparing A - C1 with C2, that is

               (subreg:MODE (plus (A) (-C1))) op (C2)

             with C1 a constant, and try to lift the SUBREG, i.e. to do the
             comparison in the wider mode.  One of the following two conditions
             must be true in order for this to be valid:

               1. The mode extension results in the same bit pattern being added
                  on both sides and the comparison is equality or unsigned.  As
                  C2 has been truncated to fit in MODE, the pattern can only be
                  all 0s or all 1s.

               2. The mode extension results in the sign bit being copied on
                  each side.

             The difficulty here is that we have predicates for A but not for
             (A - C1) so we need to check that C1 is within proper bounds so
             as to perturb A as little as possible.  */

          if (mode_width <= HOST_BITS_PER_WIDE_INT
              && subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
              && GET_CODE (SUBREG_REG (op0)) == PLUS
              && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
            {
              enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
              rtx a = XEXP (SUBREG_REG (op0), 0);
              HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));

              if ((c1 > 0
                   && (unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
                   && (equality_comparison_p || unsigned_comparison_p)
                   /* (A - C1) zero-extends if it is positive and sign-extends
                      if it is negative, C2 both zero- and sign-extends.  */
                   && ((0 == (nonzero_bits (a, inner_mode)
                              & ~GET_MODE_MASK (mode))
                        && const_op >= 0)
                       /* (A - C1) sign-extends if it is positive and 1-extends
                          if it is negative, C2 both sign- and 1-extends.  */
                       || (num_sign_bit_copies (a, inner_mode)
                           > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
                                             - mode_width)
                           && const_op < 0)))
                  || ((unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
                      /* (A - C1) always sign-extends, like C2.  */
                      && num_sign_bit_copies (a, inner_mode)
                         > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
                                           - (mode_width - 1))))
                {
                  op0 = SUBREG_REG (op0);
                  continue;
                }
            }

          /* If the inner mode is narrower and we are extracting the low part,
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
          if (subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
            /* Fall through */ ;
          else
            break;
10303
 
10304
          /* ... fall through ...  */
10305
 
10306
        case ZERO_EXTEND:
10307
          mode = GET_MODE (XEXP (op0, 0));
10308
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10309
              && (unsigned_comparison_p || equality_comparison_p)
10310
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10311
              && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10312
              && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
10313
            {
10314
              op0 = XEXP (op0, 0);
10315
              continue;
10316
            }
10317
          break;
10318
 
10319
        case PLUS:
10320
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10321
             this for equality comparisons due to pathological cases involving
10322
             overflows.  */
10323
          if (equality_comparison_p
10324
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
10325
                                                        op1, XEXP (op0, 1))))
10326
            {
10327
              op0 = XEXP (op0, 0);
10328
              op1 = tem;
10329
              continue;
10330
            }
10331
 
10332
          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10333
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10334
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10335
            {
10336
              op0 = XEXP (XEXP (op0, 0), 0);
10337
              code = (code == LT ? EQ : NE);
10338
              continue;
10339
            }
10340
          break;
10341
 
10342
        case MINUS:
10343
          /* We used to optimize signed comparisons against zero, but that
10344
             was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10345
             arrive here as equality comparisons, or (GEU, LTU) are
10346
             optimized away.  No need to special-case them.  */
10347
 
10348
          /* (eq (minus A B) C) -> (eq A (plus B C)) or
10349
             (eq B (minus A C)), whichever simplifies.  We can only do
10350
             this for equality comparisons due to pathological cases involving
10351
             overflows.  */
10352
          if (equality_comparison_p
10353
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
10354
                                                        XEXP (op0, 1), op1)))
10355
            {
10356
              op0 = XEXP (op0, 0);
10357
              op1 = tem;
10358
              continue;
10359
            }
10360
 
10361
          if (equality_comparison_p
10362
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
10363
                                                        XEXP (op0, 0), op1)))
10364
            {
10365
              op0 = XEXP (op0, 1);
10366
              op1 = tem;
10367
              continue;
10368
            }
10369
 
10370
          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10371
             of bits in X minus 1, is one iff X > 0.  */
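          /* E.g. in 8-bit arithmetic with C == 7: for X > 0 the shift
             yields 0, so the difference is -X < 0; for X == 0 it is 0;
             for X < 0 the shift yields -1 and -1 - X lies in [0, 127].
             So the sign bit is set exactly when X > 0.  */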
10372
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10373
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10374
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
10375
                 == mode_width - 1
10376
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10377
            {
10378
              op0 = XEXP (op0, 1);
10379
              code = (code == GE ? LE : GT);
10380
              continue;
10381
            }
10382
          break;
10383
 
10384
        case XOR:
10385
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
10386
             if C is zero or B is a constant.  */
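          /* XOR is its own inverse, so xoring both sides by B preserves
             equality; e.g. (eq (xor A 5) 0) becomes (eq A 5).  */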
10387
          if (equality_comparison_p
10388
              && 0 != (tem = simplify_binary_operation (XOR, mode,
10389
                                                        XEXP (op0, 1), op1)))
10390
            {
10391
              op0 = XEXP (op0, 0);
10392
              op1 = tem;
10393
              continue;
10394
            }
10395
          break;
10396
 
10397
        case EQ:  case NE:
10398
        case UNEQ:  case LTGT:
10399
        case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
10400
        case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
10401
        case UNORDERED: case ORDERED:
10402
          /* We can't do anything if OP0 is a condition code value, rather
10403
             than an actual data value.  */
10404
          if (const_op != 0
10405
              || CC0_P (XEXP (op0, 0))
10406
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10407
            break;
10408
 
10409
          /* Get the two operands being compared.  */
10410
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10411
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10412
          else
10413
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10414
 
10415
          /* Check for the cases where we simply want the result of the
10416
             earlier test or the opposite of that result.  */
10417
          if (code == NE || code == EQ
10418
              || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10419
                  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10420
                  && (STORE_FLAG_VALUE
10421
                      & (((HOST_WIDE_INT) 1
10422
                          << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10423
                  && (code == LT || code == GE)))
10424
            {
10425
              enum rtx_code new_code;
10426
              if (code == LT || code == NE)
10427
                new_code = GET_CODE (op0);
10428
              else
10429
                new_code = reversed_comparison_code (op0, NULL);
10430
 
10431
              if (new_code != UNKNOWN)
10432
                {
10433
                  code = new_code;
10434
                  op0 = tem;
10435
                  op1 = tem1;
10436
                  continue;
10437
                }
10438
            }
10439
          break;
10440
 
10441
        case IOR:
10442
          /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
10443
             iff X <= 0.  */
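          /* Worked case: for X > 0 both X - 1 and X have a clear sign
             bit, so their IOR does too.  For X < 0 the sign bit of X
             itself is set, and for X == 0 the addend X - 1 is -1.  */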
10444
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10445
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10446
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10447
            {
10448
              op0 = XEXP (op0, 1);
10449
              code = (code == GE ? GT : LE);
10450
              continue;
10451
            }
10452
          break;
10453
 
10454
        case AND:
10455
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
10456
             will be converted to a ZERO_EXTRACT later.  */
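          /* Both forms test bit X of Y: (1 << X) & Y is nonzero exactly
             when (Y >> X) & 1 is, and the latter is the canonical form
             for extracting a single bit.  */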
10457
          if (const_op == 0 && equality_comparison_p
10458
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
10459
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10460
            {
10461
              op0 = simplify_and_const_int
10462
                (op0, mode, gen_rtx_LSHIFTRT (mode,
10463
                                              XEXP (op0, 1),
10464
                                              XEXP (XEXP (op0, 0), 1)),
10465
                 (HOST_WIDE_INT) 1);
10466
              continue;
10467
            }
10468
 
10469
          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10470
             zero and X is a comparison and C1 and C2 describe only bits set
10471
             in STORE_FLAG_VALUE, we can compare with X.  */
10472
          if (const_op == 0 && equality_comparison_p
10473
              && mode_width <= HOST_BITS_PER_WIDE_INT
10474
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10475
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10476
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10477
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10478
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10479
            {
10480
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10481
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
10482
              if ((~STORE_FLAG_VALUE & mask) == 0
10483
                  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
10484
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10485
                          && COMPARISON_P (tem))))
10486
                {
10487
                  op0 = XEXP (XEXP (op0, 0), 0);
10488
                  continue;
10489
                }
10490
            }
10491
 
10492
          /* If we are doing an equality comparison of an AND of a bit equal
10493
             to the sign bit, replace this with a LT or GE comparison of
10494
             the underlying value.  */
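          /* E.g. in 8-bit arithmetic, (X & 0x80) == 0 is the same test
             as X >= 0, and (X & 0x80) != 0 the same as X < 0.  */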
10495
          if (equality_comparison_p
10496
              && const_op == 0
10497
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10498
              && mode_width <= HOST_BITS_PER_WIDE_INT
10499
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10500
                  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10501
            {
10502
              op0 = XEXP (op0, 0);
10503
              code = (code == EQ ? GE : LT);
10504
              continue;
10505
            }
10506
 
10507
          /* If this AND operation is really a ZERO_EXTEND from a narrower
10508
             mode, the constant fits within that mode, and this is either an
10509
             equality or unsigned comparison, try to do this comparison in
10510
             the narrower mode.  */
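          /* E.g. (X & 0xff) == 7 inspects only the low byte, so if a
             QImode compare exists the test can be done on the QImode
             lowpart; exact_log2 recognizes the 2**i - 1 mask.  */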
10511
          if ((equality_comparison_p || unsigned_comparison_p)
10512
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10513
              && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10514
                                   & GET_MODE_MASK (mode))
10515
                                  + 1)) >= 0
10516
              && const_op >> i == 0
10517
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10518
            {
10519
              op0 = gen_lowpart (tmode, XEXP (op0, 0));
10520
              continue;
10521
            }
10522
 
10523
          /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
10524
             fits in both M1 and M2 and the SUBREG is either paradoxical
10525
             or represents the low part, permute the SUBREG and the AND
10526
             and try again.  */
10527
          if (GET_CODE (XEXP (op0, 0)) == SUBREG)
10528
            {
10529
              unsigned HOST_WIDE_INT c1;
10530
              tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
10531
              /* Require an integral mode, to avoid creating something like
10532
                 (AND:SF ...).  */
10533
              if (SCALAR_INT_MODE_P (tmode)
10534
                  /* It is unsafe to commute the AND into the SUBREG if the
10535
                     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
10536
                     not defined.  As originally written the upper bits
10537
                     have a defined value due to the AND operation.
10538
                     However, if we commute the AND inside the SUBREG then
10539
                     they no longer have defined values and the meaning of
10540
                     the code has been changed.  */
10541
                  && (0
10542
#ifdef WORD_REGISTER_OPERATIONS
10543
                      || (mode_width > GET_MODE_BITSIZE (tmode)
10544
                          && mode_width <= BITS_PER_WORD)
10545
#endif
10546
                      || (mode_width <= GET_MODE_BITSIZE (tmode)
10547
                          && subreg_lowpart_p (XEXP (op0, 0))))
10548
                  && GET_CODE (XEXP (op0, 1)) == CONST_INT
10549
                  && mode_width <= HOST_BITS_PER_WIDE_INT
10550
                  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
10551
                  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
10552
                  && (c1 & ~GET_MODE_MASK (tmode)) == 0
10553
                  && c1 != mask
10554
                  && c1 != GET_MODE_MASK (tmode))
10555
                {
10556
                  op0 = simplify_gen_binary (AND, tmode,
10557
                                             SUBREG_REG (XEXP (op0, 0)),
10558
                                             gen_int_mode (c1, tmode));
10559
                  op0 = gen_lowpart (mode, op0);
10560
                  continue;
10561
                }
10562
            }
10563
 
10564
          /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
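          /* The low bit of (not X) is the complement of the low bit of
             X, so testing it for 1 is testing the low bit of X for 0.  */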
10565
          if (const_op == 0 && equality_comparison_p
10566
              && XEXP (op0, 1) == const1_rtx
10567
              && GET_CODE (XEXP (op0, 0)) == NOT)
10568
            {
10569
              op0 = simplify_and_const_int
10570
                (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
10571
              code = (code == NE ? EQ : NE);
10572
              continue;
10573
            }
10574
 
10575
          /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
10576
             (eq (and (lshiftrt X) 1) 0).
10577
             Also handle the case where (not X) is expressed using xor.  */
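          /* ((not X) >> N) & 1 is the complement of (X >> N) & 1, so we
             can drop the NOT and invert the sense of the test.  An XOR
             with a constant having exactly bit N set inverts the same
             bit and is handled identically.  */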
10578
          if (const_op == 0 && equality_comparison_p
10579
              && XEXP (op0, 1) == const1_rtx
10580
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
10581
            {
10582
              rtx shift_op = XEXP (XEXP (op0, 0), 0);
10583
              rtx shift_count = XEXP (XEXP (op0, 0), 1);
10584
 
10585
              if (GET_CODE (shift_op) == NOT
10586
                  || (GET_CODE (shift_op) == XOR
10587
                      && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
10588
                      && GET_CODE (shift_count) == CONST_INT
10589
                      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
10590
                      && (INTVAL (XEXP (shift_op, 1))
10591
                          == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
10592
                {
10593
                  op0 = simplify_and_const_int
10594
                    (NULL_RTX, mode,
10595
                     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
10596
                     (HOST_WIDE_INT) 1);
10597
                  code = (code == NE ? EQ : NE);
10598
                  continue;
10599
                }
10600
            }
10601
          break;
10602
 
10603
        case ASHIFT:
10604
          /* If we have (compare (ashift FOO N) (const_int C)) and
10605
             the high order N bits of FOO (N+1 if an inequality comparison)
10606
             are known to be zero, we can do this by comparing FOO with C
10607
             shifted right N bits so long as the low-order N bits of C are
10608
             zero.  */
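          /* Worked case: comparing (FOO << 2) with 12 becomes comparing
             FOO with 3.  This is valid because the low two bits of 12
             are zero and no set bit of FOO is shifted out the top.  */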
10609
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10610
              && INTVAL (XEXP (op0, 1)) >= 0
10611
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10612
                  < HOST_BITS_PER_WIDE_INT)
10613
              && ((const_op
10614
                   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10615
              && mode_width <= HOST_BITS_PER_WIDE_INT
10616
              && (nonzero_bits (XEXP (op0, 0), mode)
10617
                  & ~(mask >> (INTVAL (XEXP (op0, 1))
10618
                               + ! equality_comparison_p))) == 0)
10619
            {
10620
              /* We must perform a logical shift, not an arithmetic one,
10621
                 as we want the top N bits of C to be zero.  */
10622
              unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10623
 
10624
              temp >>= INTVAL (XEXP (op0, 1));
10625
              op1 = gen_int_mode (temp, mode);
10626
              op0 = XEXP (op0, 0);
10627
              continue;
10628
            }
10629
 
10630
          /* If we are doing a sign bit comparison, it means we are testing
10631
             a particular bit.  Convert it to the appropriate AND.  */
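          /* (X << N) < 0 looks only at bit (mode_width - 1 - N) of X,
             so we test that single bit with an AND instead.  */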
10632
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10633
              && mode_width <= HOST_BITS_PER_WIDE_INT)
10634
            {
10635
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10636
                                            ((HOST_WIDE_INT) 1
10637
                                             << (mode_width - 1
10638
                                                 - INTVAL (XEXP (op0, 1)))));
10639
              code = (code == LT ? NE : EQ);
10640
              continue;
10641
            }
10642
 
10643
          /* If this is an equality comparison with zero and we are shifting
10644
             the low bit to the sign bit, we can convert this to an AND of the
10645
             low-order bit.  */
10646
          if (const_op == 0 && equality_comparison_p
10647
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10648
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10649
                 == mode_width - 1)
10650
            {
10651
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10652
                                            (HOST_WIDE_INT) 1);
10653
              continue;
10654
            }
10655
          break;
10656
 
10657
        case ASHIFTRT:
10658
          /* If this is an equality comparison with zero, we can do this
10659
             as a logical shift, which might be much simpler.  */
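          /* Against zero the two shifts agree: each result is zero
             exactly when the high (mode_width - N) bits of the operand
             are all zero.  */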
10660
          if (equality_comparison_p && const_op == 0
10661
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10662
            {
10663
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10664
                                          XEXP (op0, 0),
10665
                                          INTVAL (XEXP (op0, 1)));
10666
              continue;
10667
            }
10668
 
10669
          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10670
             do the comparison in a narrower mode.  */
10671
          if (! unsigned_comparison_p
10672
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10673
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
10674
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10675
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10676
                                         MODE_INT, 1)) != BLKmode
10677
              && (((unsigned HOST_WIDE_INT) const_op
10678
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
10679
                  <= GET_MODE_MASK (tmode)))
10680
            {
10681
              op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
10682
              continue;
10683
            }
10684
 
10685
          /* Likewise if OP0 is a PLUS of a sign extension with a
10686
             constant, which is usually represented with the PLUS
10687
             between the shifts.  */
10688
          if (! unsigned_comparison_p
10689
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10690
              && GET_CODE (XEXP (op0, 0)) == PLUS
10691
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10692
              && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10693
              && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10694
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10695
                                         MODE_INT, 1)) != BLKmode
10696
              && (((unsigned HOST_WIDE_INT) const_op
10697
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
10698
                  <= GET_MODE_MASK (tmode)))
10699
            {
10700
              rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10701
              rtx add_const = XEXP (XEXP (op0, 0), 1);
10702
              rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
10703
                                                   add_const, XEXP (op0, 1));
10704
 
10705
              op0 = simplify_gen_binary (PLUS, tmode,
10706
                                         gen_lowpart (tmode, inner),
10707
                                         new_const);
10708
              continue;
10709
            }
10710
 
10711
          /* ... fall through ...  */
10712
        case LSHIFTRT:
10713
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10714
             the low order N bits of FOO are known to be zero, we can do this
10715
             by comparing FOO with C shifted left N bits so long as no
10716
             overflow occurs.  */
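          /* Worked case: with the low two bits of FOO known zero,
             comparing (FOO >> 2) with 3 becomes comparing FOO with 12;
             the guard ensures 3 << 2 still fits in the mode.  */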
10717
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10718
              && INTVAL (XEXP (op0, 1)) >= 0
10719
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10720
              && mode_width <= HOST_BITS_PER_WIDE_INT
10721
              && (nonzero_bits (XEXP (op0, 0), mode)
10722
                  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10723
              && (((unsigned HOST_WIDE_INT) const_op
10724
                   + (GET_CODE (op0) != LSHIFTRT
10725
                      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
10726
                         + 1)
10727
                      : 0))
10728
                  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
10729
            {
10730
              /* If the shift was logical, then we must make the condition
10731
                 unsigned.  */
10732
              if (GET_CODE (op0) == LSHIFTRT)
10733
                code = unsigned_condition (code);
10734
 
10735
              const_op <<= INTVAL (XEXP (op0, 1));
10736
              op1 = GEN_INT (const_op);
10737
              op0 = XEXP (op0, 0);
10738
              continue;
10739
            }
10740
 
10741
          /* If we are using this shift to extract just the sign bit, we
10742
             can replace this with an LT or GE comparison.  */
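          /* (X >> (mode_width - 1)) leaves only the sign bit, so a test
             against zero is simply a sign test: nonzero means X < 0.  */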
10743
          if (const_op == 0
10744
              && (equality_comparison_p || sign_bit_comparison_p)
10745
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
10746
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10747
                 == mode_width - 1)
10748
            {
10749
              op0 = XEXP (op0, 0);
10750
              code = (code == NE || code == GT ? LT : GE);
10751
              continue;
10752
            }
10753
          break;
10754
 
10755
        default:
10756
          break;
10757
        }
10758
 
10759
      break;
10760
    }
10761
 
10762
  /* Now make any compound operations involved in this comparison.  Then,
10763
     check for an outermost SUBREG on OP0 that is not doing anything or is
10764
     paradoxical.  The latter transformation must only be performed when
10765
     it is known that the "extra" bits will be the same in op0 and op1 or
10766
     that they don't matter.  There are three cases to consider:
10767
 
10768
     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
10769
     care bits and we can assume they have any convenient value.  So
10770
     making the transformation is safe.
10771
 
10772
     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
10773
     In this case the upper bits of op0 are undefined.  We should not make
10774
     the simplification in that case as we do not know the contents of
10775
     those bits.
10776
 
10777
     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
10778
     UNKNOWN.  In that case we know those bits are zeros or ones.  We must
10779
     also be sure that they are the same as the upper bits of op1.
10780
 
10781
     We can never remove a SUBREG for a non-equality comparison because
10782
     the sign bit is in a different place in the underlying object.  */
10783
 
10784
  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10785
  op1 = make_compound_operation (op1, SET);
10786
 
10787
  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10788
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10789
      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
10790
      && (code == NE || code == EQ))
10791
    {
10792
      if (GET_MODE_SIZE (GET_MODE (op0))
10793
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
10794
        {
10795
          /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
10796
             implemented.  */
10797
          if (REG_P (SUBREG_REG (op0)))
10798
            {
10799
              op0 = SUBREG_REG (op0);
10800
              op1 = gen_lowpart (GET_MODE (op0), op1);
10801
            }
10802
        }
10803
      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10804
                <= HOST_BITS_PER_WIDE_INT)
10805
               && (nonzero_bits (SUBREG_REG (op0),
10806
                                 GET_MODE (SUBREG_REG (op0)))
10807
                   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10808
        {
10809
          tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
10810
 
10811
          if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10812
               & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
10813
            op0 = SUBREG_REG (op0), op1 = tem;
10814
        }
10815
    }
10816
 
10817
  /* We now do the opposite procedure: Some machines don't have compare
10818
     insns in all modes.  If OP0's mode is an integer mode smaller than a
10819
     word and we can't do a compare in that mode, see if there is a larger
10820
     mode for which we can do the compare.  There are a number of cases in
10821
     which we can use the wider mode.  */
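  /* E.g. a QImode compare on a machine providing only SImode compares
     can be widened when both operands are known to be zero- or
     sign-extended in SImode, since the extra bits then cannot change
     the outcome of the comparison.  */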
10822
 
10823
  mode = GET_MODE (op0);
10824
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10825
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10826
      && ! have_insn_for (COMPARE, mode))
10827
    for (tmode = GET_MODE_WIDER_MODE (mode);
10828
         (tmode != VOIDmode
10829
          && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10830
         tmode = GET_MODE_WIDER_MODE (tmode))
10831
      if (have_insn_for (COMPARE, tmode))
10832
        {
10833
          int zero_extended;
10834
 
10835
          /* If the only nonzero bits in OP0 and OP1 are those in the
10836
             narrower mode and this is an equality or unsigned comparison,
10837
             we can use the wider mode.  Similarly for sign-extended
10838
             values, in which case it is true for all comparisons.  */
10839
          zero_extended = ((code == EQ || code == NE
10840
                            || code == GEU || code == GTU
10841
                            || code == LEU || code == LTU)
10842
                           && (nonzero_bits (op0, tmode)
10843
                               & ~GET_MODE_MASK (mode)) == 0
10844
                           && ((GET_CODE (op1) == CONST_INT
10845
                                || (nonzero_bits (op1, tmode)
10846
                                    & ~GET_MODE_MASK (mode)) == 0)));
10847
 
10848
          if (zero_extended
10849
              || ((num_sign_bit_copies (op0, tmode)
10850
                   > (unsigned int) (GET_MODE_BITSIZE (tmode)
10851
                                     - GET_MODE_BITSIZE (mode)))
10852
                  && (num_sign_bit_copies (op1, tmode)
10853
                      > (unsigned int) (GET_MODE_BITSIZE (tmode)
10854
                                        - GET_MODE_BITSIZE (mode)))))
10855
            {
10856
              /* If OP0 is an AND and we don't have an AND in MODE either,
10857
                 make a new AND in the proper mode.  */
10858
              if (GET_CODE (op0) == AND
10859
                  && !have_insn_for (AND, mode))
10860
                op0 = simplify_gen_binary (AND, tmode,
10861
                                           gen_lowpart (tmode,
10862
                                                        XEXP (op0, 0)),
10863
                                           gen_lowpart (tmode,
10864
                                                        XEXP (op0, 1)));
10865
 
10866
              op0 = gen_lowpart (tmode, op0);
10867
              if (zero_extended && GET_CODE (op1) == CONST_INT)
10868
                op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
10869
              op1 = gen_lowpart (tmode, op1);
10870
              break;
10871
            }
10872
 
10873
          /* If this is a test for negative, we can make an explicit
10874
             test of the sign bit.  */
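          /* X < 0 holds exactly when the sign bit of X is set, so in
             the wider mode we can test that single bit with an AND and
             compare the result against zero.  */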
10875
 
10876
          if (op1 == const0_rtx && (code == LT || code == GE)
10877
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10878
            {
10879
              op0 = simplify_gen_binary (AND, tmode,
10880
                                         gen_lowpart (tmode, op0),
10881
                                         GEN_INT ((HOST_WIDE_INT) 1
10882
                                                  << (GET_MODE_BITSIZE (mode)
10883
                                                      - 1)));
10884
              code = (code == LT) ? NE : EQ;
10885
              break;
10886
            }
10887
        }
10888
 
10889
#ifdef CANONICALIZE_COMPARISON
10890
  /* If this machine only supports a subset of valid comparisons, see if we
10891
     can convert an unsupported one into a supported one.  */
10892
  CANONICALIZE_COMPARISON (code, op0, op1);
10893
#endif
10894
 
10895
  *pop0 = op0;
10896
  *pop1 = op1;
10897
 
10898
  return code;
10899
}
10900
 
10901
/* Utility function for record_value_for_reg.  Count number of
10902
   rtxs in X.  */
10903
static int
10904
count_rtxs (rtx x)
10905
{
10906
  enum rtx_code code = GET_CODE (x);
10907
  const char *fmt;
10908
  int i, ret = 1;
10909
 
10910
  if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
10911
      || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
10912
    {
10913
      rtx x0 = XEXP (x, 0);
10914
      rtx x1 = XEXP (x, 1);
10915
 
10916
      if (x0 == x1)
10917
        return 1 + 2 * count_rtxs (x0);
10918
 
10919
      if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
10920
           || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
10921
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10922
        return 2 + 2 * count_rtxs (x0)
10923
               + count_rtxs (x0 == XEXP (x1, 0)
10924
                             ? XEXP (x1, 1) : XEXP (x1, 0));
10925
 
10926
      if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
10927
           || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
10928
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
10929
        return 2 + 2 * count_rtxs (x1)
10930
               + count_rtxs (x1 == XEXP (x0, 0)
10931
                             ? XEXP (x0, 1) : XEXP (x0, 0));
10932
    }
10933
 
10934
  fmt = GET_RTX_FORMAT (code);
10935
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10936
    if (fmt[i] == 'e')
10937
      ret += count_rtxs (XEXP (x, i));
10938
 
10939
  return ret;
10940
}
10941
 
10942
/* Utility function for following routine.  Called when X is part of a value
10943
   being stored into last_set_value.  Sets last_set_table_tick
10944
   for each register mentioned.  Similar to mention_regs in cse.c  */
10945
 
10946
static void
10947
update_table_tick (rtx x)
10948
{
10949
  enum rtx_code code = GET_CODE (x);
10950
  const char *fmt = GET_RTX_FORMAT (code);
10951
  int i;
10952
 
10953
  if (code == REG)
10954
    {
10955
      unsigned int regno = REGNO (x);
10956
      unsigned int endregno
10957
        = regno + (regno < FIRST_PSEUDO_REGISTER
10958
                   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
10959
      unsigned int r;
10960
 
10961
      for (r = regno; r < endregno; r++)
10962
        reg_stat[r].last_set_table_tick = label_tick;
10963
 
10964
      return;
10965
    }
10966
 
10967
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10968
    /* Note that we can't have an "E" in values stored; see
10969
       get_last_value_validate.  */
10970
    if (fmt[i] == 'e')
10971
      {
10972
        /* Check for identical subexpressions.  If x contains
10973
           identical subexpression we only have to traverse one of
10974
           them.  */
10975
        if (i == 0 && ARITHMETIC_P (x))
10976
          {
10977
            /* Note that at this point x1 has already been
10978
               processed.  */
10979
            rtx x0 = XEXP (x, 0);
10980
            rtx x1 = XEXP (x, 1);
10981
 
10982
            /* If x0 and x1 are identical then there is no need to
10983
               process x0.  */
10984
            if (x0 == x1)
10985
              break;
10986
 
10987
            /* If x0 is identical to a subexpression of x1 then while
10988
               processing x1, x0 has already been processed.  Thus we
10989
               are done with x.  */
10990
            if (ARITHMETIC_P (x1)
10991
                && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
10992
              break;
10993
 
10994
            /* If x1 is identical to a subexpression of x0 then we
10995
               still have to process the rest of x0.  */
10996
            if (ARITHMETIC_P (x0)
10997
                && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
10998
              {
10999
                update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11000
                break;
11001
              }
11002
          }
11003
 
11004
        update_table_tick (XEXP (x, i));
11005
      }
11006
}
11007
 
11008
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11009
   are saying that the register is clobbered and we no longer know its
11010
   value.  If INSN is zero, don't update reg_stat[].last_set; this is
11011
   only permitted with VALUE also zero and is used to invalidate the
11012
   register.  */
11013
 
11014
static void
11015
record_value_for_reg (rtx reg, rtx insn, rtx value)
11016
{
11017
  unsigned int regno = REGNO (reg);
11018
  unsigned int endregno
11019
    = regno + (regno < FIRST_PSEUDO_REGISTER
11020
               ? hard_regno_nregs[regno][GET_MODE (reg)] : 1);
11021
  unsigned int i;
11022
 
11023
  /* If VALUE contains REG and we have a previous value for REG, substitute
11024
     the previous value.  */
11025
  if (value && insn && reg_overlap_mentioned_p (reg, value))
11026
    {
11027
      rtx tem;
11028
 
11029
      /* Set things up so get_last_value is allowed to see anything set up to
11030
         our insn.  */
11031
      subst_low_cuid = INSN_CUID (insn);
11032
      tem = get_last_value (reg);
11033
 
11034
      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11035
         it isn't going to be useful and will take a lot of time to process,
11036
         so just use the CLOBBER.  */
11037
 
11038
      if (tem)
11039
        {
11040
          if (ARITHMETIC_P (tem)
11041
              && GET_CODE (XEXP (tem, 0)) == CLOBBER
11042
              && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11043
            tem = XEXP (tem, 0);
11044
          else if (count_occurrences (value, reg, 1) >= 2)
11045
            {
11046
              /* If there are two or more occurrences of REG in VALUE,
11047
                 prevent the value from growing too much.  */
11048
              if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11049
                tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11050
            }
11051
 
11052
          value = replace_rtx (copy_rtx (value), reg, tem);
11053
        }
11054
    }
11055
 
11056
  /* For each register modified, show we don't know its value, that
11057
     we don't know about its bitwise content, that its value has been
11058
     updated, and that we don't know the location of the death of the
11059
     register.  */
11060
  for (i = regno; i < endregno; i++)
11061
    {
11062
      if (insn)
11063
        reg_stat[i].last_set = insn;
11064
 
11065
      reg_stat[i].last_set_value = 0;
11066
      reg_stat[i].last_set_mode = 0;
11067
      reg_stat[i].last_set_nonzero_bits = 0;
11068
      reg_stat[i].last_set_sign_bit_copies = 0;
11069
      reg_stat[i].last_death = 0;
11070
    }
11071
 
11072
  /* Mark registers that are being referenced in this value.  */
11073
  if (value)
11074
    update_table_tick (value);
11075
 
11076
  /* Now update the status of each register being set.
11077
     If someone is using this register in this block, set this register
11078
     to invalid since we will get confused between the two lives in this
11079
     basic block.  This makes using this register always invalid.  In cse, we
11080
     scan the table to invalidate all entries using this register, but this
11081
     is too much work for us.  */
11082
 
11083
  for (i = regno; i < endregno; i++)
11084
    {
11085
      reg_stat[i].last_set_label = label_tick;
11086
      if (value && reg_stat[i].last_set_table_tick == label_tick)
11087
        reg_stat[i].last_set_invalid = 1;
11088
      else
11089
        reg_stat[i].last_set_invalid = 0;
11090
    }
11091
 
11092
  /* The value being assigned might refer to X (like in "x++;").  In that
11093
     case, we must replace it with (clobber (const_int 0)) to prevent
11094
     infinite loops.  */
11095
  if (value && ! get_last_value_validate (&value, insn,
11096
                                          reg_stat[regno].last_set_label, 0))
11097
    {
11098
      value = copy_rtx (value);
11099
      if (! get_last_value_validate (&value, insn,
11100
                                     reg_stat[regno].last_set_label, 1))
11101
        value = 0;
11102
    }
11103
 
11104
  /* For the main register being modified, update the value, the mode, the
11105
     nonzero bits, and the number of sign bit copies.  */
11106
 
11107
  reg_stat[regno].last_set_value = value;
11108
 
11109
  if (value)
11110
    {
11111
      enum machine_mode mode = GET_MODE (reg);
11112
      subst_low_cuid = INSN_CUID (insn);
11113
      reg_stat[regno].last_set_mode = mode;
11114
      if (GET_MODE_CLASS (mode) == MODE_INT
11115
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11116
        mode = nonzero_bits_mode;
11117
      reg_stat[regno].last_set_nonzero_bits = nonzero_bits (value, mode);
11118
      reg_stat[regno].last_set_sign_bit_copies
11119
        = num_sign_bit_copies (value, GET_MODE (reg));
11120
    }
11121
}
11122
 
11123
/* Called via note_stores from record_dead_and_set_regs to handle one
11124
   SET or CLOBBER in an insn.  DATA is the instruction in which the
11125
   set is occurring.  */
11126
 
11127
static void
11128
record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
11129
{
11130
  rtx record_dead_insn = (rtx) data;
11131
 
11132
  if (GET_CODE (dest) == SUBREG)
11133
    dest = SUBREG_REG (dest);
11134
 
11135
  if (REG_P (dest))
11136
    {
11137
      /* If we are setting the whole register, we know its value.  Otherwise
11138
         show that we don't know the value.  We can handle SUBREG in
11139
         some cases.  */
11140
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11141
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11142
      else if (GET_CODE (setter) == SET
11143
               && GET_CODE (SET_DEST (setter)) == SUBREG
11144
               && SUBREG_REG (SET_DEST (setter)) == dest
11145
               && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11146
               && subreg_lowpart_p (SET_DEST (setter)))
11147
        record_value_for_reg (dest, record_dead_insn,
11148
                              gen_lowpart (GET_MODE (dest),
11149
                                           SET_SRC (setter)));
11150
      else
11151
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11152
    }
11153
  else if (MEM_P (dest)
11154
           /* Ignore pushes, they clobber nothing.  */
11155
           && ! push_operand (dest, GET_MODE (dest)))
11156
    mem_last_set = INSN_CUID (record_dead_insn);
11157
}
11158
 
11159
/* Update the records of when each REG was most recently set or killed
11160
   for the things done by INSN.  This is the last thing done in processing
11161
   INSN in the combiner loop.
11162
 
11163
   We update reg_stat[], in particular fields last_set, last_set_value,
11164
   last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11165
   last_death, and also the similar information mem_last_set (which insn
11166
   most recently modified memory) and last_call_cuid (which insn was the
11167
   most recent subroutine call).  */
11168
 
11169
static void
11170
record_dead_and_set_regs (rtx insn)
11171
{
11172
  rtx link;
11173
  unsigned int i;
11174
 
11175
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11176
    {
11177
      if (REG_NOTE_KIND (link) == REG_DEAD
11178
          && REG_P (XEXP (link, 0)))
11179
        {
11180
          unsigned int regno = REGNO (XEXP (link, 0));
11181
          unsigned int endregno
11182
            = regno + (regno < FIRST_PSEUDO_REGISTER
11183
                       ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))]
11184
                       : 1);
11185
 
11186
          for (i = regno; i < endregno; i++)
11187
            reg_stat[i].last_death = insn;
11188
        }
11189
      else if (REG_NOTE_KIND (link) == REG_INC)
11190
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
11191
    }
11192
 
11193
  if (CALL_P (insn))
11194
    {
11195
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11196
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
11197
          {
11198
            reg_stat[i].last_set_value = 0;
11199
            reg_stat[i].last_set_mode = 0;
11200
            reg_stat[i].last_set_nonzero_bits = 0;
11201
            reg_stat[i].last_set_sign_bit_copies = 0;
11202
            reg_stat[i].last_death = 0;
11203
          }
11204
 
11205
      last_call_cuid = mem_last_set = INSN_CUID (insn);
11206
 
11207
      /* Don't bother recording what this insn does.  It might set the
11208
         return value register, but we can't combine into a call
11209
         pattern anyway, so there's no point trying (and it may cause
11210
         a crash, if e.g. we wind up asking for last_set_value of a
11211
         SUBREG of the return value register).  */
11212
      return;
11213
    }
11214
 
11215
  note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11216
}
11217
 
11218
/* If a SUBREG has the promoted bit set, it is in fact a property of the
11219
   register present in the SUBREG, so for each such SUBREG go back and
11220
   adjust nonzero and sign bit information of the registers that are
11221
   known to have some zero/sign bits set.
11222
 
11223
   This is needed because when combine blows the SUBREGs away, the
11224
   information on zero/sign bits is lost and further combines can be
11225
   missed because of that.  */
11226
 
11227
static void
11228
record_promoted_value (rtx insn, rtx subreg)
11229
{
11230
  rtx links, set;
11231
  unsigned int regno = REGNO (SUBREG_REG (subreg));
11232
  enum machine_mode mode = GET_MODE (subreg);
11233
 
11234
  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11235
    return;
11236
 
11237
  for (links = LOG_LINKS (insn); links;)
11238
    {
11239
      insn = XEXP (links, 0);
11240
      set = single_set (insn);
11241
 
11242
      if (! set || !REG_P (SET_DEST (set))
11243
          || REGNO (SET_DEST (set)) != regno
11244
          || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11245
        {
11246
          links = XEXP (links, 1);
11247
          continue;
11248
        }
11249
 
11250
      if (reg_stat[regno].last_set == insn)
11251
        {
11252
          if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11253
            reg_stat[regno].last_set_nonzero_bits &= GET_MODE_MASK (mode);
11254
        }
11255
 
11256
      if (REG_P (SET_SRC (set)))
11257
        {
11258
          regno = REGNO (SET_SRC (set));
11259
          links = LOG_LINKS (insn);
11260
        }
11261
      else
11262
        break;
11263
    }
11264
}
11265
 
11266
/* Scan X for promoted SUBREGs.  For each one found,
11267
   note what it implies to the registers used in it.  */
11268
 
11269
static void
11270
check_promoted_subreg (rtx insn, rtx x)
11271
{
11272
  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11273
      && REG_P (SUBREG_REG (x)))
11274
    record_promoted_value (insn, x);
11275
  else
11276
    {
11277
      const char *format = GET_RTX_FORMAT (GET_CODE (x));
11278
      int i, j;
11279
 
11280
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11281
        switch (format[i])
11282
          {
11283
          case 'e':
11284
            check_promoted_subreg (insn, XEXP (x, i));
11285
            break;
11286
          case 'V':
11287
          case 'E':
11288
            if (XVEC (x, i) != 0)
11289
              for (j = 0; j < XVECLEN (x, i); j++)
11290
                check_promoted_subreg (insn, XVECEXP (x, i, j));
11291
            break;
11292
          }
11293
    }
11294
}
11295
 
11296
/* Utility routine for the following function.  Verify that all the registers
11297
   mentioned in *LOC are valid when *LOC was part of a value set when
11298
   label_tick == TICK.  Return 0 if some are not.
11299
 
11300
   If REPLACE is nonzero, replace the invalid reference with
11301
   (clobber (const_int 0)) and return 1.  This replacement is useful because
11302
   we often can get useful information about the form of a value (e.g., if
11303
   it was produced by a shift that always produces -1 or 0) even though
11304
   we don't know exactly what registers it was produced from.  */
11305
 
11306
static int
11307
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
11308
{
11309
  rtx x = *loc;
11310
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11311
  int len = GET_RTX_LENGTH (GET_CODE (x));
11312
  int i;
11313
 
11314
  if (REG_P (x))
11315
    {
11316
      unsigned int regno = REGNO (x);
11317
      unsigned int endregno
11318
        = regno + (regno < FIRST_PSEUDO_REGISTER
11319
                   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11320
      unsigned int j;
11321
 
11322
      for (j = regno; j < endregno; j++)
11323
        if (reg_stat[j].last_set_invalid
11324
            /* If this is a pseudo-register that was only set once and not
11325
               live at the beginning of the function, it is always valid.  */
11326
            || (! (regno >= FIRST_PSEUDO_REGISTER
11327
                   && REG_N_SETS (regno) == 1
11328
                   && (! REGNO_REG_SET_P
11329
                       (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11330
                        regno)))
11331
                && reg_stat[j].last_set_label > tick))
11332
          {
11333
            if (replace)
11334
              *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11335
            return replace;
11336
          }
11337
 
11338
      return 1;
11339
    }
11340
  /* If this is a memory reference, make sure that there were
11341
     no stores after it that might have clobbered the value.  We don't
11342
     have alias info, so we assume any store invalidates it.  */
11343
  else if (MEM_P (x) && !MEM_READONLY_P (x)
11344
           && INSN_CUID (insn) <= mem_last_set)
11345
    {
11346
      if (replace)
11347
        *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11348
      return replace;
11349
    }
11350
 
11351
  for (i = 0; i < len; i++)
11352
    {
11353
      if (fmt[i] == 'e')
11354
        {
11355
          /* Check for identical subexpressions.  If x contains
11356
             identical subexpression we only have to traverse one of
11357
             them.  */
11358
          if (i == 1 && ARITHMETIC_P (x))
11359
            {
11360
              /* Note that at this point x0 has already been checked
11361
                 and found valid.  */
11362
              rtx x0 = XEXP (x, 0);
11363
              rtx x1 = XEXP (x, 1);
11364
 
11365
              /* If x0 and x1 are identical then x is also valid.  */
11366
              if (x0 == x1)
11367
                return 1;
11368
 
11369
              /* If x1 is identical to a subexpression of x0 then
11370
                 while checking x0, x1 has already been checked.  Thus
11371
                 it is valid and so is x.  */
11372
              if (ARITHMETIC_P (x0)
11373
                  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11374
                return 1;
11375
 
11376
              /* If x0 is identical to a subexpression of x1 then x is
11377
                 valid iff the rest of x1 is valid.  */
11378
              if (ARITHMETIC_P (x1)
11379
                  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11380
                return
11381
                  get_last_value_validate (&XEXP (x1,
11382
                                                  x0 == XEXP (x1, 0) ? 1 : 0),
11383
                                           insn, tick, replace);
11384
            }
11385
 
11386
          if (get_last_value_validate (&XEXP (x, i), insn, tick,
11387
                                       replace) == 0)
11388
            return 0;
11389
        }
11390
      /* Don't bother with these.  They shouldn't occur anyway.  */
11391
      else if (fmt[i] == 'E')
11392
        return 0;
11393
    }
11394
 
11395
  /* If we haven't found a reason for it to be invalid, it is valid.  */
11396
  return 1;
11397
}
11398
 
11399
/* Get the last value assigned to X, if known.  Some registers
11400
   in the value may be replaced with (clobber (const_int 0)) if their value
11401
   is no longer known reliably.  */
11402
 
11403
static rtx
11404
get_last_value (rtx x)
11405
{
11406
  unsigned int regno;
11407
  rtx value;
11408
 
11409
  /* If this is a non-paradoxical SUBREG, get the value of its operand and
11410
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
11411
     we cannot predict what values the "extra" bits might have.  */
11412
  if (GET_CODE (x) == SUBREG
11413
      && subreg_lowpart_p (x)
11414
      && (GET_MODE_SIZE (GET_MODE (x))
11415
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11416
      && (value = get_last_value (SUBREG_REG (x))) != 0)
11417
    return gen_lowpart (GET_MODE (x), value);
11418
 
11419
  if (!REG_P (x))
11420
    return 0;
11421
 
11422
  regno = REGNO (x);
11423
  value = reg_stat[regno].last_set_value;
11424
 
11425
  /* If we don't have a value, or if it isn't for this basic block and
11426
     it's either a hard register, set more than once, or it's live
11427
     at the beginning of the function, return 0.
11428
 
11429
     Because if it's not live at the beginning of the function then the reg
11430
     is always set before being used (is never used without being set).
11431
     And, if it's set only once, and it's always set before use, then all
11432
     uses must have the same last value, even if it's not from this basic
11433
     block.  */
11434
 
11435
  if (value == 0
11436
      || (reg_stat[regno].last_set_label != label_tick
11437
          && (regno < FIRST_PSEUDO_REGISTER
11438
              || REG_N_SETS (regno) != 1
11439
              || (REGNO_REG_SET_P
11440
                  (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
11441
                   regno)))))
11442
    return 0;
11443
 
11444
  /* If the value was set in a later insn than the ones we are processing,
11445
     we can't use it even if the register was only set once.  */
11446
  if (INSN_CUID (reg_stat[regno].last_set) >= subst_low_cuid)
11447
    return 0;
11448
 
11449
  /* If the value has all its registers valid, return it.  */
11450
  if (get_last_value_validate (&value, reg_stat[regno].last_set,
11451
                               reg_stat[regno].last_set_label, 0))
11452
    return value;
11453
 
11454
  /* Otherwise, make a copy and replace any invalid register with
11455
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
11456
 
11457
  value = copy_rtx (value);
11458
  if (get_last_value_validate (&value, reg_stat[regno].last_set,
11459
                               reg_stat[regno].last_set_label, 1))
11460
    return value;
11461
 
11462
  return 0;
11463
}
11464
 
11465
/* Return nonzero if expression X refers to a REG or to memory
11466
   that is set in an instruction more recent than FROM_CUID.  */
11467
 
11468
static int
11469
use_crosses_set_p (rtx x, int from_cuid)
11470
{
11471
  const char *fmt;
11472
  int i;
11473
  enum rtx_code code = GET_CODE (x);
11474
 
11475
  if (code == REG)
11476
    {
11477
      unsigned int regno = REGNO (x);
11478
      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11479
                                 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
11480
 
11481
#ifdef PUSH_ROUNDING
11482
      /* Don't allow uses of the stack pointer to be moved,
11483
         because we don't know whether the move crosses a push insn.  */
11484
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
11485
        return 1;
11486
#endif
11487
      for (; regno < endreg; regno++)
11488
        if (reg_stat[regno].last_set
11489
            && INSN_CUID (reg_stat[regno].last_set) > from_cuid)
11490
          return 1;
11491
      return 0;
11492
    }
11493
 
11494
  if (code == MEM && mem_last_set > from_cuid)
11495
    return 1;
11496
 
11497
  fmt = GET_RTX_FORMAT (code);
11498
 
11499
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11500
    {
11501
      if (fmt[i] == 'E')
11502
        {
11503
          int j;
11504
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11505
            if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11506
              return 1;
11507
        }
11508
      else if (fmt[i] == 'e'
11509
               && use_crosses_set_p (XEXP (x, i), from_cuid))
11510
        return 1;
11511
    }
11512
  return 0;
11513
}
11514
 
11515
/* Define three variables used for communication between the following
11516
   routines.  */
11517
 
11518
static unsigned int reg_dead_regno, reg_dead_endregno;
11519
static int reg_dead_flag;
11520
 
11521
/* Function called via note_stores from reg_dead_at_p.
11522
 
11523
   If DEST is within [reg_dead_regno, reg_dead_endregno), set
11524
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */
11525
 
11526
static void
11527
reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
11528
{
11529
  unsigned int regno, endregno;
11530
 
11531
  if (!REG_P (dest))
11532
    return;
11533
 
11534
  regno = REGNO (dest);
11535
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11536
                      ? hard_regno_nregs[regno][GET_MODE (dest)] : 1);
11537
 
11538
  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11539
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11540
}
11541
 
11542
/* Return nonzero if REG is known to be dead at INSN.
11543
 
11544
   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
11545
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
11546
   live.  Otherwise, see if it is live or dead at the start of the basic
11547
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
11548
   must be assumed to be always live.  */
11549
 
11550
static int
11551
reg_dead_at_p (rtx reg, rtx insn)
11552
{
11553
  basic_block block;
11554
  unsigned int i;
11555
 
11556
  /* Set variables for reg_dead_at_p_1.  */
11557
  reg_dead_regno = REGNO (reg);
11558
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11559
                                        ? hard_regno_nregs[reg_dead_regno]
11560
                                                          [GET_MODE (reg)]
11561
                                        : 1);
11562
 
11563
  reg_dead_flag = 0;
11564
 
11565
  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
11566
     we allow the machine description to decide whether use-and-clobber
11567
     patterns are OK.  */
11568
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11569
    {
11570
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11571
        if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
11572
          return 0;
11573
    }
11574
 
11575
  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11576
     beginning of function.  */
11577
  for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
11578
       insn = prev_nonnote_insn (insn))
11579
    {
11580
      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11581
      if (reg_dead_flag)
11582
        return reg_dead_flag == 1 ? 1 : 0;
11583
 
11584
      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11585
        return 1;
11586
    }
11587
 
11588
  /* Get the basic block that we were in.  */
11589
  if (insn == 0)
11590
    block = ENTRY_BLOCK_PTR->next_bb;
11591
  else
11592
    {
11593
      FOR_EACH_BB (block)
11594
        if (insn == BB_HEAD (block))
11595
          break;
11596
 
11597
      if (block == EXIT_BLOCK_PTR)
11598
        return 0;
11599
    }
11600
 
11601
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11602
    if (REGNO_REG_SET_P (block->il.rtl->global_live_at_start, i))
11603
      return 0;
11604
 
11605
  return 1;
11606
}
11607
 
11608
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (MEM_P (XEXP (x, 0)))
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int endregno, r;

          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
          for (r = regno; r < endregno; r++)
            SET_HARD_REG_BIT (newpat_used_regs, r);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (MEM_P (testreg))
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}

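/* Editorial sketch, not part of combine.c: the generic operand walk
   that closes mark_used_regs_combine, shown on its own.  GET_RTX_FORMAT
   yields one format letter per operand: 'e' is a subexpression, 'E' a
   vector of subexpressions.  The function name walk_rtx is hypothetical.  */
#if 0
static void
walk_rtx (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      walk_rtx (XEXP (x, i));           /* single subexpression */
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        walk_rtx (XVECEXP (x, i, j));   /* vector of subexpressions */
}
#endif
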
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      REG_N_DEATHS (regno)--;
      remove_note (insn, note);
    }

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
             rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx where_dead = reg_stat[regno].last_death;
      rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
          && ! reg_referenced_p (x, maybe_kill_insn))
        return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
         make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
        before_dead = PREV_INSN (before_dead);

      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
        after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
          && INSN_CUID (before_dead) >= from_cuid
          && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
              || (where_dead != after_dead
                  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
        {
          rtx note = remove_death (regno, where_dead);

          /* It is possible for the call above to return 0.  This can occur
             when last_death points to I2 or I1 that we combined with.
             In that case make a new note.

             We must also check for the case where X is a hard register
             and NOTE is a death note for a range of hard registers
             including X.  In that case, we must put REG_DEAD notes for
             the remaining registers in place of NOTE.  */

          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                  > GET_MODE_SIZE (GET_MODE (x))))
            {
              unsigned int deadregno = REGNO (XEXP (note, 0));
              unsigned int deadend
                = (deadregno + hard_regno_nregs[deadregno]
                                               [GET_MODE (XEXP (note, 0))]);
              unsigned int ourend
                = regno + hard_regno_nregs[regno][GET_MODE (x)];
              unsigned int i;

              for (i = deadregno; i < deadend; i++)
                if (i < regno || i >= ourend)
                  REG_NOTES (where_dead)
                    = gen_rtx_EXPR_LIST (REG_DEAD,
                                         regno_reg_rtx[i],
                                         REG_NOTES (where_dead));
            }

          /* If we didn't find any note, or if we found a REG_DEAD note that
             covers only part of the given reg, and we have a multi-reg hard
             register, then to be safe we must check for REG_DEAD notes
             for each register other than the first.  They could have
             their own REG_DEAD notes lying around.  */
          else if ((note == 0
                    || (note != 0
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                            < GET_MODE_SIZE (GET_MODE (x)))))
                   && regno < FIRST_PSEUDO_REGISTER
                   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
            {
              unsigned int ourend
                = regno + hard_regno_nregs[regno][GET_MODE (x)];
              unsigned int i, offset;
              rtx oldnotes = 0;

              if (note)
                offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
              else
                offset = 1;

              for (i = regno + offset; i < ourend; i++)
                move_deaths (regno_reg_rtx[i],
                             maybe_kill_insn, from_cuid, to_insn, &oldnotes);
            }

          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
            {
              XEXP (note, 1) = *pnotes;
              *pnotes = note;
            }
          else
            *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

          REG_N_DEATHS (regno)++;
        }

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
         that accesses one word of a multi-word item, some
         piece of every register in the expression is used by
         this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}

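/* Editorial sketch, not part of combine.c: the half-open insn window
   that move_deaths tests a death against, once WHERE_DEAD has been
   normalized to an insn with a valid cuid.  The helper name
   death_in_window_p is hypothetical.  */
#if 0
static int
death_in_window_p (rtx where_dead, int from_cuid, rtx to_insn)
{
  /* Inclusive at FROM_CUID, exclusive at TO_INSN.  */
  return (INSN_CUID (where_dead) >= from_cuid
          && INSN_CUID (where_dead) < INSN_CUID (to_insn));
}
#endif
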
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (!REG_P (target))
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)];
      endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}

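/* Worked example (editorial, not part of combine.c): the final test in
   reg_bitfield_target_p is a half-open interval overlap check.  With
   regno = 2, endregno = 4 (X covers hard regs 2-3) and tregno = 3,
   endtregno = 5 (the target covers hard regs 3-4), both endregno > tregno
   (4 > 3) and regno < endtregno (2 < 5) hold, so the two register ranges
   overlap on hard reg 3 and the function returns 1.  */
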
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   of insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
                  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
         the latest copy of that register.  */
      if (XEXP (note, 0) && REG_P (XEXP (note, 0))
          && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
        XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_VALUE_PROFILE:
          /* Just get rid of this note, as it is unused later anyway.  */
          break;

        case REG_NON_LOCAL_GOTO:
          if (JUMP_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && JUMP_P (i2));
              place = i2;
            }
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (CALL_P (i3))
            place = i3;
          else if (i2 && CALL_P (i2))
            place = i2;
          else
            {
              gcc_assert (flag_non_call_exceptions);
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (CALL_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && CALL_P (i2));
              place = i2;
            }
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if it is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (REG_P (XEXP (note, 0))
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (REG_P (XEXP (note, 0))
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
        case REG_NO_CONFLICT:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* Don't attach REG_LABEL note to a JUMP_INSN.  Add
             a JUMP_LABEL instead or decrement LABEL_NUSES.  */
          if (place && JUMP_P (place))
            {
              rtx label = JUMP_LABEL (place);

              if (!label)
                JUMP_LABEL (place) = XEXP (note, 0);
              else
                {
                  gcc_assert (label == XEXP (note, 0));
                  if (LABEL_P (label))
                    LABEL_NUSES (label)--;
                }
              place = 0;
            }
          if (place2 && JUMP_P (place2))
            {
              rtx label = JUMP_LABEL (place2);

              if (!label)
                JUMP_LABEL (place2) = XEXP (note, 0);
              else
                {
                  gcc_assert (label == XEXP (note, 0));
                  if (LABEL_P (label))
                    LABEL_NUSES (label)--;
                }
              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;

        case REG_RETVAL:
          /* If the insn previously containing this note still exists,
             put it back where it was.  Otherwise move it to the previous
             insn.  Adjust the corresponding REG_LIBCALL note.  */
          if (!NOTE_P (from_insn))
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
              place = prev_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_RETVAL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_LIBCALL:
          /* This is handled similarly to REG_RETVAL.  */
          if (!NOTE_P (from_insn))
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
              place = next_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_LIBCALL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_DEAD:
          /* If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && CALL_P (from_insn)
              && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
            place = from_insn;
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;
          else if (i2 != 0 && next_nonnote_insn (i2) == i3
                   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
            place = i2;

          if (place == 0
              && (rtx_equal_p (XEXP (note, 0), elim_i2)
                  || rtx_equal_p (XEXP (note, 0), elim_i1)))
            break;

          if (place == 0)
            {
              basic_block bb = this_basic_block;

              /* You might think you could search back from FROM_INSN
                 rather than from I3, but combine tries to split invalid
                 combined instructions.  This can result in the old I2
                 or I1 moving later in the insn sequence.  */
              for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
                {
                  if (! INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX,
                                            NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          SET_INSN_DELETED (tem);

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX,
                                                NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              SET_INSN_DELETED (cc0_setter);
                            }
#endif
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  Do not place a REG_DEAD note, even if
                             the register is also used here; that would not
                             match the algorithm used in lifetime analysis
                             and can cause the consistency check in the
                             scheduler to fail.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (CALL_P (tem)
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      /* This may not be the correct place for the death
                         note if FROM_INSN is before TEM, and the reg is
                         set between FROM_INSN and TEM.  The reg might
                         die two or more times.  An existing death note
                         means we are looking at the wrong live range.  */
                      if (from_insn
                          && INSN_CUID (from_insn) < INSN_CUID (tem)
                          && find_regno_note (tem, REG_DEAD,
                                              REGNO (XEXP (note, 0))))
                        {
                          tem = from_insn;
                          if (tem == BB_HEAD (bb))
                            break;
                          continue;
                        }

                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && INSN_UID (place) <= max_uid_cuid
                          && INSN_CUID (place) > INSN_CUID (i2)
                          && from_insn
                          && INSN_CUID (from_insn) > INSN_CUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }

              /* We haven't found an insn for the death note and it
                 is still a REG_DEAD note, but we have hit the beginning
                 of the block.  If the existing life info says the reg
                 was dead, there's nothing left to do.  Otherwise, we'll
                 need to do a global life update after combine.  */
              if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
                  && REGNO_REG_SET_P (bb->il.rtl->global_live_at_start,
                                      REGNO (XEXP (note, 0))))
                SET_BIT (refresh_blocks, this_basic_block->index);
            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here if it is set at all, not if it is totally replaced,
             which is what `dead_or_set_p' checks, so also check for it being
             set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));

              /* Similarly, if the instruction on which we want to place
                 the note is a noop, we'll need to do a global live update
                 after we remove them in delete_noop_moves.  */
              if (noop_move_p (place))
                SET_BIT (refresh_blocks, this_basic_block->index);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     last_death.  [I no longer understand why this is
                     being done.] */
                  if (reg_stat[regno].last_death != place)
                    reg_stat[regno].last_death = 0;
                  place = 0;
                }
              else
                reg_stat[regno].last_death = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
                {
                  unsigned int endregno
                    = regno + hard_regno_nregs[regno]
                                              [GET_MODE (XEXP (note, 0))];
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += hard_regno_nregs[i][reg_raw_mode[i]])
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note
                                = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (! INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      {
                                        SET_BIT (refresh_blocks,
                                                 this_basic_block->index);
                                        break;
                                      }
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    REG_NOTES (tem)
                                      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
                                                           REG_NOTES (tem));
                                    break;
                                  }
                              }

                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          gcc_unreachable ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && REG_P (XEXP (note, 0)))
        REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && REG_P (XEXP (note, 0)))
            REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

          REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
                                               REG_NOTE_KIND (note),
                                               XEXP (note, 0),
                                               REG_NOTES (place2));
        }
    }
}

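/* Editorial note, not part of combine.c: the placement step at the end
   of distribute_notes simply threads NOTE onto the head of PLACE's
   note list:

     XEXP (note, 1) = REG_NOTES (place);
     REG_NOTES (place) = note;

   For PLACE2 a fresh copy is built with gen_rtx_fmt_ee instead, since
   a single EXPR_LIST cell cannot be on two lists at once.  */
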
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */

static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only potential for this is if we
         replace I3, I2, and I1 by I3 and I2.  But in that case the
         destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (CALL_P (insn)
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}

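/* Editorial note, not part of combine.c: when a link must move off an
   insn, combine detaches the whole list and lets distribute_links find
   each entry a new home, as distribute_notes does above:

     rtx links = LOG_LINKS (place);
     LOG_LINKS (place) = 0;
     distribute_links (links);
*/
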
/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */

static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}

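/* Editorial sketch, not part of combine.c: a hypothetical use of
   unmentioned_reg_p.  EQUIV and EXPR stand for arbitrary rtx values,
   and record_equivalence is a hypothetical consumer.  */
#if 0
  /* Accept EQUIV only if every REG or MEM it mentions also appears in
     EXPR, i.e. EQUIV is a "specialization" of EXPR in the sense of the
     comment above.  */
  if (!unmentioned_reg_p (equiv, expr))
    record_equivalence (equiv);
#endif
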
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (rtx insn)
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  gcc_assert (INSN_UID (insn) <= max_uid_cuid);

  return INSN_CUID (insn);
}

void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}


static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static void
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      timevar_pop (TV_JUMP);

      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
    }
}

struct tree_opt_pass pass_combine =
{
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'c'                                   /* letter */
};

