/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKS never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS has no entries for uses of CC0.  None are needed, because
   the insn that sets CC0 is always immediately before the insn that
   tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn
   explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by substituting the values computed by the
   previous insn(s) for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
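
/* For illustration only (register numbers are made up; this example is
   not part of the original sources): given two insns linked by a
   LOG_LINK,

     (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
     (set (reg:SI 101) (mult:SI (reg:SI 100) (reg:SI 98)))

   where reg 100 dies in the second insn, substitution produces

     (set (reg:SI 101) (mult:SI (plus:SI (reg:SI 99) (const_int 4))
                                (reg:SI 98)))

   If the machine description recognizes the result (say, as a
   multiply-add), it replaces both insns; otherwise the combiner tries
   splitting it or gives up and undoes the substitution.  */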

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proved to be a bad idea because it made it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
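
/* Illustrative note (not from the original sources): because cuids
   increase monotonically through the insn stream, ordering questions
   become plain integer comparisons, e.g.

     if (INSN_CUID (insn) < last_call_cuid)
       ... INSN was emitted before the most recently seen CALL_INSN ...

   whereas raw uids carry no ordering guarantee.  */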

/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

struct reg_stat {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx                           last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx                           last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two paragraphs are out of date.)

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx                           last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int                           last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int                           last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT        last_set_nonzero_bits;
  char                          last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode)   last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char                          last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char                 sign_bit_copies;

  unsigned HOST_WIDE_INT        nonzero_bits;

  /* Record the value of label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int                           truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode)   truncated_to_mode : 8;
};
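
/* Worked example (illustrative; it extends the byte-load case mentioned
   above, and the register number is made up): on a machine where byte
   loads zero extend, after

     (set (reg:SI 100) (zero_extend:SI (mem:QI ...)))

   we may record reg_stat[100].nonzero_bits == 0xff (only the low 8 bits
   can be nonzero) and reg_stat[100].sign_bit_copies >= 24 (the top 24
   bits all equal the sign bit, which is known to be zero), so a later
   `and' of reg 100 with 0xff can be recognized as redundant.  */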

static struct reg_stat *reg_stat;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* Length of the currently allocated uid_insn_cost array.  */

static int last_insn_cost;

/* Incremented for each label.  */

static int label_tick;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  Keeping it zero during the computation prevents propagating
   values based on previously set values, which can be incorrect if a
   variable is modified in a loop.  */

static int nonzero_sign_valid;


/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

struct undo
{
  struct undo *next;
  enum { UNDO_RTX, UNDO_INT, UNDO_MODE } kind;
  union { rtx r; int i; enum machine_mode m; } old_contents;
  union { rtx *r; int *i; } where;
};

/* Record a bunch of changes to be undone.  undos is the chain of
   changes currently recorded; frees chains undo structures that are
   free for reuse.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;
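
/* Sketch of the undo protocol (a summary based on the declarations
   below; undo_all and undo_commit are defined later in this file):

     do_SUBST (&XEXP (x, 0), newval);  records the old value on
                                       undobuf.undos before storing
     ...
     undo_all ();     if the combination fails: restore every *where
                      from old_contents
     undo_commit ();  if it succeeds: keep the changes and recycle the
                      records via undobuf.frees

   This is only an orientation aid, not a specification of those
   routines.  */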

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (rtx, enum machine_mode, rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (void);
static void set_nonzero_bits_and_sign_copies (rtx, rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static rtx expand_field_assignment (rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_conversions (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (rtx);
static int use_crosses_set_p (rtx, int);
static void reg_dead_at_p_1 (rtx, rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static int insn_cuid (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static void record_truncated_value (rtx);
static bool reg_truncated_to_mode (enum machine_mode, rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);


/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;
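
/* Illustrative note: these hooks take effect while combine runs;
   combine_instructions below installs them and restores the defaults
   when it finishes:

     rtl_hooks = combine_rtl_hooks;
     ...
     rtl_hooks = general_rtl_hooks;

   so generic simplification code invoked during combine picks up the
   combine-specific gen_lowpart and nonzero-bits routines.  */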

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && GET_CODE (SUBREG_REG (oldval)) == CONST_INT));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && GET_CODE (XEXP (oldval, 0)) == CONST_INT));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)     do_SUBST(&(INTO), (NEWVAL))
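
/* Typical use (an illustration; `pat' and `new_src' are made-up names):

     SUBST (SET_SRC (pat), new_src);

   expands to do_SUBST (&SET_SRC (pat), new_src), replacing the SET's
   source in place while recording the old value so that undo_all can
   restore it if the combination is eventually rejected.  */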

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for a HOST_WIDE_INT value (including the value of a CONST_INT) is not
   safe, since only `int' is handled here.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  PUT_MODE (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))
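
/* Typical use (an illustration; `regno' and `new_mode' are made-up
   names): changing the mode of a pseudo in place,

     SUBST_MODE (regno_reg_rtx[regno], new_mode);

   Because the shared rtx in regno_reg_rtx is modified rather than
   replaced, every reference to that pseudo sees the new mode, and the
   UNDO_MODE record lets undo_all restore the old mode.  */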

/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT and NEWI2PAT are cheaper according to insn_rtx_cost
   than the original instruction sequence I1, I2 and I3.  Note that I1
   and/or NEWI2PAT may be NULL_RTX.  This function returns false if the
   costs of all instructions can be estimated and the replacements are
   more expensive than the original sequence.  */

static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Look up the original insn_rtx_costs.  */
  i2_cost = INSN_UID (i2) <= last_insn_cost
            ? uid_insn_cost[INSN_UID (i2)] : 0;
  i3_cost = INSN_UID (i3) <= last_insn_cost
            ? uid_insn_cost[INSN_UID (i3)] : 0;

  if (i1)
    {
      i1_cost = INSN_UID (i1) <= last_insn_cost
                ? uid_insn_cost[INSN_UID (i1)] : 0;
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = (INSN_UID (undobuf.other_insn) <= last_insn_cost
                        ? uid_insn_cost[INSN_UID (undobuf.other_insn)] : 0);
      new_other_cost = insn_rtx_cost (PATTERN (undobuf.other_insn));
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old_cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  uid_insn_cost[INSN_UID (i2)] = new_i2_cost;
  uid_insn_cost[INSN_UID (i3)] = new_i3_cost;
  if (i1)
    uid_insn_cost[INSN_UID (i1)] = 0;

  return true;
}
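
/* Worked example with made-up numbers: if i1 is absent and i2 and i3
   each cost 4, old_cost = 8.  A replacement NEWPAT costing 6 (with no
   NEWI2PAT) gives new_cost = 6 <= 8, so the combination is allowed.
   Had NEWPAT cost 10, new_cost > old_cost and false would be returned.
   A cost of 0 anywhere means "unknown" and disables the comparison,
   allowing the combination.  */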

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  unsigned int j = 0;
  rtx links, nextlinks;
  sbitmap_iterator sbi;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  rtl_hooks = combine_rtl_hooks;

  reg_stat = XCNEWVEC (struct reg_stat, nregs);

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = XNEWVEC (int, i + 1);
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  /* Allocate array of current insn_rtx_costs.  */
  uid_insn_cost = XCNEWVEC (int, max_uid_cuid + 1);
  last_insn_cost = max_uid_cuid;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
        {
          note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                       NULL);
          record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
          for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
            if (REG_NOTE_KIND (links) == REG_INC)
              set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                NULL);
#endif

          /* Record the current insn_rtx_cost of this instruction.  */
          if (NONJUMP_INSN_P (insn))
            uid_insn_cost[INSN_UID (insn)] = insn_rtx_cost (PATTERN (insn));
          if (dump_file)
            fprintf (dump_file, "insn_cost %d: %d\n",
                     INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
        }

      if (LABEL_P (insn))
        label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;

          if (LABEL_P (insn))
            label_tick++;

          else if (INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_conversions (insn, PATTERN (insn));

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may already have been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      i2mod = temp;
                      i2mod_old_rhs = copy_rtx (orig);
                      i2mod_new_rhs = copy_rtx (note);
                      next = try_combine (insn, i2mod, NULL_RTX,
                                          &new_direct_jump_p);
                      i2mod = NULL_RTX;
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, j, sbi)
    BASIC_BLOCK (j)->flags |= BB_DIRTY;
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
                                    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                                    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (uid_insn_cost);
  free (reg_stat);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the last_xxx fields of reg_stat in preparation for another pass.
   Only the fields that precede sign_bit_copies in the structure are
   cleared; the accumulated sign_bit_copies, nonzero_bits and truncation
   information is preserved.  */

static void
init_reg_last (void)
{
  unsigned int i;
  for (i = 0; i < combine_max_regno; i++)
    memset (reg_stat + i, 0, offsetof (struct reg_stat, sign_bit_copies));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (void)
{
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  if (targetm.calls.promote_function_args (TREE_TYPE (cfun->decl)))
    {
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        /* Check whether this register can hold an incoming pointer
           argument.  FUNCTION_ARG_REGNO_P tests outgoing register
           numbers, so translate if necessary due to register windows.  */
        if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
            && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
          {
            record_value_for_reg
              (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
                                           : SIGN_EXTEND),
                                          GET_MODE (reg),
                                          gen_rtx_CLOBBER (mode, const0_rtx)));
          }
    }
}
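
/* Illustrative example (modes and register numbers are made up): if an
   unsigned char argument arrives in (reg:SI 3) already zero-extended
   by the caller, the loop above records its value as

     (zero_extend:SI (clobber:QI (const_int 0)))

   i.e. "the zero extension of some unknown QImode value", which lets
   later simplifications drop a redundant re-extension of reg 3 without
   knowing the actual argument.  */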

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, rtx set,
                                  void *data ATTRIBUTE_UNUSED)
{
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
         (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          reg_stat[REGNO (x)].sign_bit_copies = 1;
          return;
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
              && GET_CODE (src) == CONST_INT
              && INTVAL (src) > 0
              && 0 != (INTVAL (src)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            src = GEN_INT (INTVAL (src)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

          /* Don't call nonzero_bits if it cannot change anything.  */
          if (reg_stat[REGNO (x)].nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
            reg_stat[REGNO (x)].nonzero_bits
              |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (reg_stat[REGNO (x)].sign_bit_copies == 0
              || reg_stat[REGNO (x)].sign_bit_copies > num)
            reg_stat[REGNO (x)].sign_bit_copies = num;
        }
      else
        {
          reg_stat[REGNO (x)].nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          reg_stat[REGNO (x)].sign_bit_copies = 1;
        }
    }
}
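
/* Worked example for the SHORT_IMMEDIATES_SIGN_EXTEND adjustment above
   (illustrative; constants shown for a 32-bit word): with X in QImode
   and src = (const_int 0x80), INTVAL (src) is positive but bit 7, the
   QImode sign bit, is set; src is therefore widened to 0xffffff80
   before being OR'd into nonzero_bits, conservatively modeling targets
   that sign-extend such short immediates.  */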

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
               rtx *pdest, rtx *psrc)
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
          rtx note;

          switch (GET_CODE (elt))
            {
            /* This is important to combine floating point insns
               for the SH4 port.  */
            case USE:
              /* Combining an isolated USE doesn't make sense.
                 We depend here on combinable_i3pat to reject them.  */
              /* The code below this loop only verifies that the inputs of
                 the SET in INSN do not change.  We call reg_set_between_p
                 to verify that the REG in the USE does not change between
                 I3 and INSN.
                 If the USE in INSN was for a pseudo register, the matching
                 insn pattern will likely match any register; combining this
                 with any other USE would only be safe if we knew that the
                 used registers have identical values, or if there was
                 something to tell them apart, e.g. different modes.  For
                 now, we forgo such complicated tests and simply disallow
                 combining of USEs of pseudo registers with any other USE.  */
              if (REG_P (XEXP (elt, 0))
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
                {
                  rtx i3pat = PATTERN (i3);
                  int i = XVECLEN (i3pat, 0) - 1;
                  unsigned int regno = REGNO (XEXP (elt, 0));

                  do
                    {
                      rtx i3elt = XVECEXP (i3pat, 0, i);

                      if (GET_CODE (i3elt) == USE
                          && REG_P (XEXP (i3elt, 0))
                          && (REGNO (XEXP (i3elt, 0)) == regno
                              ? reg_set_between_p (XEXP (elt, 0),
                                                   PREV_INSN (insn), i3)
                              : regno >= FIRST_PSEUDO_REGISTER))
                        return 0;
                    }
                  while (--i >= 0);
                }
              break;

              /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && (!(note = find_reg_note (insn, REG_EH_REGION, NULL_RTX))
                      || INTVAL (XEXP (note, 0)) <= 0)
                  && ! side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;
1255
  set = expand_field_assignment (set);
1256
  src = SET_SRC (set), dest = SET_DEST (set);
1257
 
1258
  /* Don't eliminate a store in the stack pointer.  */
1259
  if (dest == stack_pointer_rtx
1260
      /* Don't combine with an insn that sets a register to itself if it has
1261
         a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
1262
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1263
      /* Can't merge an ASM_OPERANDS.  */
1264
      || GET_CODE (src) == ASM_OPERANDS
1265
      /* Can't merge a function call.  */
1266
      || GET_CODE (src) == CALL
1267
      /* Don't eliminate a function call argument.  */
1268
      || (CALL_P (i3)
1269
          && (find_reg_fusage (i3, USE, dest)
1270
              || (REG_P (dest)
1271
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
1272
                  && global_regs[REGNO (dest)])))
1273
      /* Don't substitute into an incremented register.  */
1274
      || FIND_REG_INC_NOTE (i3, dest)
1275
      || (succ && FIND_REG_INC_NOTE (succ, dest))
1276
      /* Don't substitute into a non-local goto, this confuses CFG.  */
1277
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1278
#if 0
1279
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
         pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
         use REG_RETVAL notes for noconflict blocks, but other code here
         makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; last_set of that register might point to
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
         equivalent to the memory so the substitution is valid even if there
         are intervening stores.  Also, don't move a volatile asm or
         UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
          && (((!MEM_P (src)
                || ! find_reg_note (insn, REG_EQUIV, src))
               && use_crosses_set_p (src, INSN_CUID (insn)))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
              || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
         better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (REG_P (src)
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
              /* Don't extend the life of a hard register unless it is
                 a user variable (if we have few registers) or it can't
                 fit into the desired register (meaning something special
                 is going on).
                 Also avoid substituting a return register into I3, because
                 reload can't handle a conflict with constraints of other
                 inputs.  */
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;


  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
        {
          /* Don't substitute for a register intended as a clobberable
             operand.  */
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
          if (rtx_equal_p (reg, dest))
            return 0;

          /* If the clobber represents an earlyclobber operand, we must not
             substitute an expression containing the clobbered register.
             As we do not analyze the constraint strings here, we have to
             make the conservative assumption.  However, if the register is
             a fixed hard reg, the clobber cannot represent any operand;
             we leave it up to the machine description to either accept or
             reject use-and-clobber patterns.  */
          if (!REG_P (reg)
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
              || !fixed_regs[REGNO (reg)])
            if (reg_overlap_mentioned_p (reg, src))
              return 0;
        }

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure SUCC doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
          return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (JUMP_P (i3)
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || (pred != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
            || (succ != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
         (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
                    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
                  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
         above.  We don't want to prevent pseudos from being combined
         into the address of a MEM, so only prevent the combination if
         I1 or I2 set the same MEM.  */
      if ((inner_dest != dest &&
           (!MEM_P (inner_dest)
            || rtx_equal_p (i2dest, inner_dest)
            || (i1dest && rtx_equal_p (i1dest, inner_dest)))
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

          /* This is the same test done in can_combine_p except we can't test
             all_adjacent; we don't have to, since this instruction will stay
             in place, thus we are not considering increasing the lifetime of
             INNER_DEST.

             Also, if this insn sets a function argument, combining it with
             something that might need a spill could clobber a previous
             function argument; the all_adjacent test in can_combine_p also
             checks this; here, we do a more specific test for this case.  */

          || (REG_P (inner_dest)
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn, so
         record that for later.  We have to consider paradoxical
         subregs here, since they kill the whole register, but we
         ignore partial subregs, STRICT_LOW_PART, etc.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (subdest))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
        subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
          && REG_P (subdest)
          && reg_referenced_p (subdest, PATTERN (i3))
          && REGNO (subdest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (subdest) != ARG_POINTER_REGNUM
              || ! fixed_regs[REGNO (subdest)])
#endif
          && REGNO (subdest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = subdest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
                && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      if (BINARY_P (x))
        return contains_muldiv (XEXP (x, 0))
            || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
        return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}
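
/* Editor's illustration (not part of the original source): for
   X == (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101)),
   contains_muldiv returns 0, since exact_log2 (4) == 2 marks the multiply
   as a cheap shift; replacing the constant with (const_int 6) makes
   exact_log2 return -1, so the result is 1.  */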

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
           && ! fixed_regs[REGNO (src)]
           && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
          || (REGNO (dest) < FIRST_PSEUDO_REGISTER
              && ! fixed_regs[REGNO (dest)]
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}
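
/* Editor's illustration (not part of the original source; IA-32 shown as
   an example target): hard register 0 (%eax) belongs to the one-register
   class AREG, for which CLASS_LIKELY_SPILLED_P is true, so a copy such as
   (set (reg:SI 58) (reg:SI 0)) is rejected here, while a copy between two
   pseudos, e.g. (set (reg:SI 58) (reg:SI 59)), is allowed to combine.  */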

struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, rtx set, void *data)
{
  struct likely_spilled_retval_info *info = data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}
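
/* Editor's worked example (hypothetical values, not in the original
   source): with info->regno == 10 and info->nregs == 3, mask bit N tracks
   hard reg 10 + N.  A store to (reg:DI 11) that occupies two hard regs
   gives nregs == 2 and new_mask == (2U << 1) - 1 == 0x3, shifted left by
   11 - 10 to 0x6; clearing those bits records that regs 11 and 12 are
   fully overwritten, while reg 10 may still carry part of the return
   value.  */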

/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */

static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs--;
  do
    {
      if ((mask & 1 << nregs)
          && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
        return 1;
    } while (nregs--);
  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  rtx *loc;

  /* For notes, be conservative and simply remove them.  */
  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
      else
        loc = &XEXP (*loc, 1);
    }

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
}

/* Return TRUE if combine can reuse reg X in mode MODE.
   ADDED_SETS is nonzero if the original set is still required.  */
static bool
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
{
  unsigned int regno;

  if (!REG_P (x))
    return false;

  regno = REGNO (x);
  /* Allow hard registers if the new mode is legal, and occupies no more
     registers than the old mode.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    return (HARD_REGNO_MODE_OK (regno, mode)
            && (hard_regno_nregs[regno][GET_MODE (x)]
                >= hard_regno_nregs[regno][mode]));

  /* Or a pseudo that is only used once.  */
  return (REG_N_SETS (regno) == 1 && !added_sets
          && !REG_USERVAR_P (x));
}
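
/* Editor's note (not part of the original source): in other words, a hard
   register may change mode only if the new mode is valid for it and
   occupies no more hard registers than the old one, while a pseudo may
   change mode only if it is set exactly once, its original SET is being
   discarded (ADDED_SETS == 0), and it is not a user variable.  */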


/* Check whether X, the destination of a set, refers to part of
   the register specified by REG.  */

static bool
reg_subword_p (rtx x, rtx reg)
{
  /* Check that REG is an integer mode register.  */
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT)
    x = XEXP (x, 0);

  return GET_CODE (x) == SUBREG
         && SUBREG_REG (x) == reg
         && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}
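
/* Editor's illustration (not part of the original source): with
   REG == (reg:DI 100), reg_subword_p returns true for
   X == (subreg:SI (reg:DI 100) 0) and for
   X == (strict_low_part (subreg:SI (reg:DI 100) 0)), but false for
   X == (reg:DI 100) itself, since a full-register SET is not a
   subword store.  */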


/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases.  */
  rtx i1pat = 0, i2pat = 0;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i2dest_killed = 0, i1dest_killed = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 contains a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;

  int maxreg;
  rtx temp;
  rtx link;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || likely_spilled_retval_p (i3)
      /* We also can't do anything if I3 has a
         REG_LIBCALL note since we don't want to disrupt the contiguity of a
         libcall.  */
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
         pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBERs or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
               || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
              && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_cuid = INSN_CUID (i2);

              added_sets_2 = added_sets_1 = 0;
              i2dest = SET_SRC (PATTERN (i3));
              i2dest_killed = dead_or_set_p (i2, i2dest);

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }

  /* If I2 is setting a pseudo to a constant and I3 is setting some
     sub-part of it to another constant, merge them by making a new
     constant.  */
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (GET_CODE (SET_SRC (temp)) == CONST_INT
          || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && GET_CODE (PATTERN (i3)) == SET
      && (GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT
          || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
    {
      rtx dest = SET_DEST (PATTERN (i3));
      int offset = -1;
      int width = 0;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        {
          if (GET_CODE (XEXP (dest, 1)) == CONST_INT
              && GET_CODE (XEXP (dest, 2)) == CONST_INT)
            {
              width = INTVAL (XEXP (dest, 1));
              offset = INTVAL (XEXP (dest, 2));
              dest = XEXP (dest, 0);
              if (BITS_BIG_ENDIAN)
                offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
            }
        }
      else
        {
          if (GET_CODE (dest) == STRICT_LOW_PART)
            dest = XEXP (dest, 0);
          width = GET_MODE_BITSIZE (GET_MODE (dest));
          offset = 0;
        }

      if (offset >= 0)
        {
          /* If this is the low part, we're done.  */
          if (subreg_lowpart_p (dest))
            ;
          /* Handle the case where inner is twice the size of outer.  */
          else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
                   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
            offset += GET_MODE_BITSIZE (GET_MODE (dest));
          /* Otherwise give up for now.  */
          else
            offset = -1;
        }

      if (offset >= 0)
        {
          HOST_WIDE_INT mhi, ohi, ihi;
          HOST_WIDE_INT mlo, olo, ilo;
          rtx inner = SET_SRC (PATTERN (i3));
          rtx outer = SET_SRC (temp);

          if (GET_CODE (outer) == CONST_INT)
            {
              olo = INTVAL (outer);
              ohi = olo < 0 ? -1 : 0;
            }
          else
            {
              olo = CONST_DOUBLE_LOW (outer);
              ohi = CONST_DOUBLE_HIGH (outer);
            }

          if (GET_CODE (inner) == CONST_INT)
            {
              ilo = INTVAL (inner);
              ihi = ilo < 0 ? -1 : 0;
            }
          else
            {
              ilo = CONST_DOUBLE_LOW (inner);
              ihi = CONST_DOUBLE_HIGH (inner);
            }

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
              mhi = 0;
            }
          else if (width < HOST_BITS_PER_WIDE_INT * 2)
            {
              mhi = ((unsigned HOST_WIDE_INT) 1
                     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
              mlo = -1;
            }
          else
            {
              mlo = -1;
              mhi = -1;
            }

          ilo &= mlo;
          ihi &= mhi;

          if (offset >= HOST_BITS_PER_WIDE_INT)
            {
              mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
              mlo = 0;
              ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
              ilo = 0;
            }
          else if (offset > 0)
            {
              mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
              mlo = mlo << offset;
              ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
              ilo = ilo << offset;
            }

          olo = (olo & ~mlo) | ilo;
          ohi = (ohi & ~mhi) | ihi;

          combine_merges++;
          subst_insn = i3;
          subst_low_cuid = INSN_CUID (i2);
          added_sets_2 = added_sets_1 = 0;
          i2dest = SET_DEST (temp);
          i2dest_killed = dead_or_set_p (i2, i2dest);

          SUBST (SET_SRC (temp),
                 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));

          newpat = PATTERN (i2);
          goto validate_replacement;
        }
    }
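
  /* Editor's worked example (not part of the original source; high
     HOST_WIDE_INT halves omitted): I2 is
     (set (reg:SI 100) (const_int 0x12345678)) and I3 is
     (set (subreg:HI (reg:SI 100) 0) (const_int 0x4321)) on a
     little-endian target, so width == 16 and offset == 0.  Then
     mlo == 0xFFFF, and olo becomes
     (0x12345678 & ~0xFFFF) | 0x4321 == 0x12344321, so the two constant
     stores are merged into a single load of 0x12344321.  */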

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same INSN_CUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                             BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
                             XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
                             NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && REG_P (SET_SRC (PATTERN (i3)))
        && MEM_P (SET_DEST (PATTERN (i3)))
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
              || (i1 != 0
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
        {
          undo_all ();
          return 0;
        }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
        i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
      else
        i2pat = copy_rtx (PATTERN (i2));
    }

  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
        i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
      else
        i1pat = copy_rtx (PATTERN (i1));
    }

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef SELECT_CC_MODE
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef SELECT_CC_MODE
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          if (can_change_dest_mode (SET_DEST (newpat), added_sets_2,
                                    compare_mode))
            {
              unsigned int regno = REGNO (SET_DEST (newpat));
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      /* It is possible that the source of I2 or I1 may be performing
         an unneeded operation, such as a ZERO_EXTEND of something
         that is known to have the high part zero.  Handle that case
         by letting subst look at the innermost one of them.

         Another way to do this would be to have a function that tries
         to simplify a single insn instead of merging two or more
         insns.  We don't do this because of the potential of infinite
         loops and because of the potential extra memory required.
         However, doing it the way we are is a bit of a kludge and
         doesn't catch all cases.

         But only do this if -fexpensive-optimizations since it slows
         things down and doesn't usually win.

         This is not done in the COMPARE case above because the
         unmodified I2PAT is used in the PARALLEL and so a pattern
         with a modified I2SRC would not match.  */

      if (flag_expensive_optimizations)
        {
          /* Pass pc_rtx so no substitutions are done, just
             simplifications.  */
          if (i1)
            {
              subst_low_cuid = INSN_CUID (i1);
              i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
            }
          else
            {
              subst_low_cuid = INSN_CUID (i2);
              i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
            }
        }

      n_occurrences = 0;         /* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;

      /* Record whether I2's body now appears within I3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
         above (see detailed comments there) that ensures that I1DEST
         isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
                              0, (rtx*) 0))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
                  sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets) = i1pat;

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);

      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
        RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and the insn
     isn't marked as possibly trapping in an EH region.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.

     It's pointless doing this if we originally had two sets, one from
     I3, and one from I2.  Combining then splitting the parallel results
     in the original I2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.

     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */

  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);
      rtx note;

      if (((REG_P (SET_DEST (set1))
            && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
           || (GET_CODE (SET_DEST (set1)) == SUBREG
               && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
          && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
              || INTVAL (XEXP (note, 0)) <= 0)
          && ! side_effects_p (SET_SRC (set1)))
        {
          newpat = set0;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }

      else if (((REG_P (SET_DEST (set0))
                 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
                || (GET_CODE (SET_DEST (set0)) == SUBREG
                    && find_reg_note (i3, REG_UNUSED,
                                      SUBREG_REG (SET_DEST (set0)))))
               && (!(note = find_reg_note (i3, REG_EH_REGION, NULL_RTX))
                   || INTVAL (XEXP (note, 0)) <= 0)
               && ! side_effects_p (SET_SRC (set0)))
        {
          newpat = set1;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

          if (insn_code_number >= 0)
            {
              /* If we will be able to accept this, we have made a
                 change to the destination of I3.  This requires us to
                 do a few adjustments.  */

              PATTERN (i3) = newpat;
              adjust_for_new_dest (i3);
            }
        }
    }

  /* If we were combining three insns and the result is a simple SET
2541
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
2542
     insns.  There are two ways to do this.  It can be split using a
2543
     machine-specific method (like when you have an addition of a large
2544
     constant) or by combine in the function find_split_point.  */
2545
 
2546
  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
2547
      && asm_noperands (newpat) < 0)
2548
    {
2549
      rtx m_split, *split;
2550
 
2551
      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
2552
         use I2DEST as a scratch register will help.  In the latter case,
2553
         convert I2DEST to the mode of the source of NEWPAT if we can.  */
2554
 
2555
      m_split = split_insns (newpat, i3);
2556
 
2557
      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
2558
         inputs of NEWPAT.  */
2559
 
2560
      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
2561
         possible to try that as a scratch reg.  This would require adding
2562
         more code to make it work though.  */
2563
 
2564
      if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
2565
        {
2566
          enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
2567
 
2568
          /* First try to split using the original register as a
2569
             scratch register.  */
2570
          m_split = split_insns (gen_rtx_PARALLEL
2571
                                 (VOIDmode,
2572
                                  gen_rtvec (2, newpat,
2573
                                             gen_rtx_CLOBBER (VOIDmode,
2574
                                                              i2dest))),
2575
                                 i3);
2576
 
2577
          /* If that didn't work, try changing the mode of I2DEST if
2578
             we can.  */
2579
          if (m_split == 0
2580
              && new_mode != GET_MODE (i2dest)
2581
              && new_mode != VOIDmode
2582
              && can_change_dest_mode (i2dest, added_sets_2, new_mode))
2583
            {
2584
              enum machine_mode old_mode = GET_MODE (i2dest);
2585
              rtx ni2dest;
2586
 
2587
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
2588
                ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
2589
              else
2590
                {
2591
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
2592
                  ni2dest = regno_reg_rtx[REGNO (i2dest)];
2593
                }
2594
 
2595
              m_split = split_insns (gen_rtx_PARALLEL
2596
                                     (VOIDmode,
2597
                                      gen_rtvec (2, newpat,
2598
                                                 gen_rtx_CLOBBER (VOIDmode,
2599
                                                                  ni2dest))),
                                     i3);

              if (m_split == 0
                  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                {
                  struct undo *buf;

                  PUT_MODE (regno_reg_rtx[REGNO (i2dest)], old_mode);
                  buf = undobuf.undos;
                  undobuf.undos = buf->next;
                  buf->next = undobuf.frees;
                  undobuf.frees = buf;
                }
            }
        }

      /* If recog_for_combine has discarded clobbers, try to use them
         again for the split.  */
      if (m_split == 0 && newpat_vec_with_clobbers)
        m_split
          = split_insns (gen_rtx_PARALLEL (VOIDmode,
                                           newpat_vec_with_clobbers), i3);

      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
        {
          m_split = PATTERN (m_split);
          insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = m_split;
        }
      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
               && (next_real_insn (i2) == i3
                   || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (NEXT_INSN (m_split));
          newi2pat = PATTERN (m_split);

          i3set = single_set (NEXT_INSN (m_split));
          i2set = single_set (m_split);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (REG_P (new_i3_dest)
                  && REG_P (new_i2_dest)
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                REG_N_SETS (REGNO (new_i2_dest))++;
            }
        }

      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && REG_P (i2dest)
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.
             Make sure we don't change a hard register to have a mode that
             isn't valid for it, or change the number of registers.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || can_change_dest_mode (i2dest, added_sets_2,
                                       GET_MODE (*split)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, INSN_CUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);
          bool subst_done = false;
          newi2pat = NULL_RTX;

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
                  newdest = regno_reg_rtx[REGNO (i2dest)];
                }
            }

          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
          if (split_code == MULT
              && GET_CODE (XEXP (*split, 1)) == CONST_INT
              && INTVAL (XEXP (*split, 1)) > 0
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_ASHIFT (split_mode,
                                             XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }

#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
            {
#ifdef LOAD_EXTEND_OP
              /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
                 what it really is.  */
              if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
                  == SIGN_EXTEND)
                SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
              else
#endif
                SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
            }
#endif

          /* Attempt to split binary operators using arithmetic identities.  */
          if (BINARY_P (SET_SRC (newpat))
              && split_mode == GET_MODE (SET_SRC (newpat))
              && ! side_effects_p (SET_SRC (newpat)))
            {
              rtx setsrc = SET_SRC (newpat);
              enum machine_mode mode = GET_MODE (setsrc);
              enum rtx_code code = GET_CODE (setsrc);
              rtx src_op0 = XEXP (setsrc, 0);
              rtx src_op1 = XEXP (setsrc, 1);

              /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
              if (rtx_equal_p (src_op0, src_op1))
                {
                  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
                  SUBST (XEXP (setsrc, 0), newdest);
                  SUBST (XEXP (setsrc, 1), newdest);
                  subst_done = true;
                }
              /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
              else if ((code == PLUS || code == MULT)
                       && GET_CODE (src_op0) == code
                       && GET_CODE (XEXP (src_op0, 0)) == code
                       && (INTEGRAL_MODE_P (mode)
                           || (FLOAT_MODE_P (mode)
                               && flag_unsafe_math_optimizations)))
                {
                  rtx p = XEXP (XEXP (src_op0, 0), 0);
                  rtx q = XEXP (XEXP (src_op0, 0), 1);
                  rtx r = XEXP (src_op0, 1);
                  rtx s = src_op1;

                  /* Split both "((X op Y) op X) op Y" and
                     "((X op Y) op Y) op X" as "T op T" where T is
                     "X op Y".  */
                  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
                       || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
                    {
                      newi2pat = gen_rtx_SET (VOIDmode, newdest,
                                              XEXP (src_op0, 0));
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                  /* Split "((X op X) op Y) op Y" as "T op T" where
                     T is "X op Y".  */
                  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
                    {
                      rtx tmp = simplify_gen_binary (code, mode, p, r);
                      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                }
            }

          if (!subst_done)
            {
              newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
              SUBST (*split, newdest);
            }

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                {
                  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                  if (reg_overlap_mentioned_p (reg, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
                }

          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the first assignment is a
     condition code register or cc0.  We eliminate this case by making sure
     the SET_DEST and SET_SRC have the same mode.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
               == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (REG_P (temp)
                  && reg_stat[REGNO (temp)].nonzero_bits != 0
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (reg_stat[REGNO (temp)].nonzero_bits
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (REG_P (temp)
                      && reg_stat[REGNO (temp)].nonzero_bits != 0
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (reg_stat[REGNO (temp)].nonzero_bits
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
        swap_i2i3 = 1;
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1))
           && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
                 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
#ifdef HAVE_cc0
           /* We cannot split the parallel into two sets if both sets
              reference cc0.  */
           && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
                 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
#endif
           )
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            {
              if (REG_P (XEXP (note, 0)))
                REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

              remove_note (undobuf.other_insn, note);
            }
        }

      for (note = new_other_notes; note; note = XEXP (note, 1))
        if (REG_P (XEXP (note, 0)))
          REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }
#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
     they are adjacent to each other or not.  */
  {
    rtx p = prev_nonnote_insn (i3);
    if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
        && sets_cc0_p (newi2pat))
      {
        undo_all ();
        return 0;
      }
  }
#endif

  /* Only allow this combination if insn_rtx_costs reports that the
     replacement instructions are cheaper than the originals.  */
  if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat))
    {
      undo_all ();
      return 0;
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  if (swap_i2i3)
    {
      rtx insn;
      rtx link;
      rtx ni2dest;

      /* I3 now uses what used to be its destination, which is now
         I2's destination.  This requires us to do a few adjustments.  */
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);

      /* We need a LOG_LINK from I3 to I2.  But we used to have one,
         so we still will.

         However, some later insn might be using I2's dest and have
         a LOG_LINK pointing at I3.  We must remove this link.
         The simplest way to remove the link is to point it at I1,
         which we know will be a NOTE.  */

      /* newi2pat is usually a SET here; however, recog_for_combine might
         have added some clobbers.  */
      if (GET_CODE (newi2pat) == PARALLEL)
        ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
      else
        ni2dest = SET_DEST (newi2pat);

      for (insn = NEXT_INSN (i3);
           insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
            {
              for (link = LOG_LINKS (insn); link;
                   link = XEXP (link, 1))
                if (XEXP (link, 0) == i3)
                  XEXP (link, 0) = i1;

              break;
            }
        }
    }

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    unsigned int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   || !i2dest_killed
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   || !i1dest_killed
                   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;

    if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
      {
        rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);

        reset_used_flags (call_usage);
        call_usage = copy_rtx (call_usage);

        if (substed_i2)
          replace_rtx (call_usage, i2dest, i2src);

        if (substed_i1)
          replace_rtx (call_usage, i1dest, i1src);

        CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
      }

    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
               || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
              && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      SET_INSN_DELETED (i2);

    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        SET_INSN_DELETED (i1);
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
                   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
        for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
          if (REG_P (XEXP (temp, 0)))
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
        for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
          if (REG_P (XEXP (temp, 0)))
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (REG_P (i3dest_killed))
          REG_N_DEATHS (REGNO (i3dest_killed))++;

        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }

    if (i2dest_in_i2src)
      {
        if (REG_P (i2dest))
          REG_N_DEATHS (REGNO (i2dest))++;

        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        if (REG_P (i1dest))
          REG_N_DEATHS (REGNO (i1dest))++;

        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (REG_P (i2dest))
      {
        rtx link;
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST.  If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            REG_N_SETS (regno)--;
          }
      }

    if (i1 && REG_P (i1dest))
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          REG_N_SETS (regno)--;
      }

    /* Update reg_stat[].nonzero_bits et al for any changes that may have
       been made to this insn.  The order of the
       set_nonzero_bits_and_sign_copies() calls is important, because
       newi2pat can affect the nonzero_bits of newpat.  */
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);

    /* Set new_direct_jump_p if a new return or simple jump instruction
       has been created.

       If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if (returnjump_p (i3) || any_uncondjump_p (i3))
      {
        *new_direct_jump_p = 1;
        mark_jump_label (PATTERN (i3), i3, 0);

        if ((temp = next_nonnote_insn (i3)) == NULL_RTX
            || !BARRIER_P (temp))
          emit_barrier_after (i3);
      }

    if (undobuf.other_insn != NULL_RTX
        && (returnjump_p (undobuf.other_insn)
            || any_uncondjump_p (undobuf.other_insn)))
      {
        *new_direct_jump_p = 1;

        if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
            || !BARRIER_P (temp))
          emit_barrier_after (undobuf.other_insn);
      }

    /* A no-op jump does not need a barrier, but it does need cleaning up
       of the CFG.  */
    if (GET_CODE (newpat) == SET
        && SET_SRC (newpat) == pc_rtx
        && SET_DEST (newpat) == pc_rtx)
      *new_direct_jump_p = 1;
  }

  combine_successes++;
  undo_commit ();

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
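
/* Editor's note: a minimal, self-contained sketch (not GCC code; every
   name below is hypothetical) of the MULT -> ASHIFT rewrite performed
   above when a split point is a multiply by a power of two that had
   appeared inside an address.  It mirrors the exact_log2 test used in
   try_combine: the rewrite applies only when the constant is a positive
   power of two.  Guarded with #if 0 so it cannot interfere with the
   surrounding translation unit.  */
#if 0   /* illustrative sketch only; not part of combine.c */
#include <stdio.h>

static int
exact_log2_sketch (long v)
{
  int k = 0;
  if (v <= 0 || (v & (v - 1)) != 0)     /* not a positive power of two */
    return -1;
  while ((v >>= 1) != 0)                /* count the trailing zero bits */
    k++;
  return k;
}

int
main (void)
{
  long c = 8;                           /* as in (mult FOO (const_int 8)) */
  int k = exact_log2_sketch (c);
  if (k >= 0)                           /* (mult FOO 8) => (ashift FOO 3) */
    printf ("(mult FOO %ld) becomes (ashift FOO %d)\n", c, k);
  return 0;
}
#endif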

/* Undo all the modifications recorded in undobuf.  */

static void
undo_all (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      switch (undo->kind)
        {
        case UNDO_RTX:
          *undo->where.r = undo->old_contents.r;
          break;
        case UNDO_INT:
          *undo->where.i = undo->old_contents.i;
          break;
        case UNDO_MODE:
          PUT_MODE (*undo->where.r, undo->old_contents.m);
          break;
        default:
          gcc_unreachable ();
        }

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  undobuf.undos = 0;
}

/* We've committed to accepting the changes we made.  Move all
   of the undos to the free list.  */

static void
undo_commit (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }
  undobuf.undos = 0;
}
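
/* Editor's note: a minimal sketch (hypothetical types; not GCC's
   struct undo) of the undo-buffer pattern used by undo_all and
   undo_commit above.  Every change records where it was made and the
   old contents; undoing walks the list restoring values, committing
   just abandons them, and both paths recycle the nodes onto a free
   list so later combination attempts can reuse them.  */
#if 0   /* illustrative sketch only; not part of combine.c */
#include <stdlib.h>

struct undo_sketch
{
  struct undo_sketch *next;
  int *where;                   /* location that was modified */
  int old_contents;             /* value to restore on undo */
};

static struct undo_sketch *undos_sketch, *frees_sketch;

static void
record_change_sketch (int *where, int new_val)
{
  struct undo_sketch *u = frees_sketch;
  if (u)
    frees_sketch = u->next;     /* reuse a node from the free list */
  else
    u = malloc (sizeof *u);
  u->where = where;
  u->old_contents = *where;
  u->next = undos_sketch;
  undos_sketch = u;
  *where = new_val;
}

static void
undo_all_sketch (void)          /* like undo_all: restore, then recycle */
{
  while (undos_sketch)
    {
      struct undo_sketch *u = undos_sketch;
      undos_sketch = u->next;
      *u->where = u->old_contents;
      u->next = frees_sketch;
      frees_sketch = u;
    }
}

static void
undo_commit_sketch (void)       /* like undo_commit: recycle only */
{
  while (undos_sketch)
    {
      struct undo_sketch *u = undos_sketch;
      undos_sketch = u->next;
      u->next = frees_sketch;
      frees_sketch = u;
    }
}
#endif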

/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (rtx *loc, rtx insn)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  int unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (MEM_P (SUBREG_REG (x)))
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          SUBST (XEXP (x, 0),
                 gen_rtx_LO_SUM (Pmode,
                                 gen_rtx_HIGH (Pmode, XEXP (x, 0)),
                                 XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif

      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
                                 subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq
              && NEXT_INSN (seq) != NULL_RTX
              && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
              && NONJUMP_INSN_P (seq)
              && GET_CODE (PATTERN (seq)) == SET
              && SET_DEST (PATTERN (seq)) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (seq)))
              && NONJUMP_INSN_P (NEXT_INSN (seq))
              && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
              && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
              && memory_address_p (GET_MODE (x),
                                   SET_SRC (PATTERN (NEXT_INSN (seq)))))
            {
              rtx src1 = SET_SRC (PATTERN (seq));
              rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
            return &XEXP (XEXP (x, 0), 0);
        }
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && !OBJECT_P (SET_SRC (x))
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
          && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
          && GET_CODE (SET_SRC (x)) == CONST_INT
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
          rtx or_mask;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          or_mask = gen_int_mode (src << pos, mode);
          if (src == mask)
            SUBST (SET_SRC (x),
                   simplify_gen_binary (IOR, mode, dest, or_mask));
          else
            {
              rtx negmask = gen_int_mode (~(mask << pos), mode);
              SUBST (SET_SRC (x),
                     simplify_gen_binary (IOR, mode,
                                          simplify_gen_binary (AND, mode,
                                                               dest, negmask),
                                          or_mask));
            }

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or nonzero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && REG_P (XEXP (SET_SRC (x), 0))
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && REG_P (SET_DEST (x))
              && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
          break;

        case NE:
          /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
             is known to be on, so this can be converted into a NEG of a
             shift.  */
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_NEG (mode,
                                  gen_rtx_LSHIFTRT (mode,
                                                    XEXP (SET_SRC (x), 0),
                                                    GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          break;

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_AND (mode,
                                  gen_rtx_LSHIFTRT
                                  (mode, gen_lowpart (mode, inner),
                                   GEN_INT (pos)),
                                  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_fmt_ee
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_ASHIFT (mode,
                                      gen_lowpart (mode, inner),
                                      GEN_INT (GET_MODE_BITSIZE (mode)
                                               - len - pos)),
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }

      /* See if this is a simple operation with a constant as the second
         operand.  It might be that this constant is out of range and hence
         could be used as a split point.  */
      if (BINARY_P (SET_SRC (x))
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
          && (OBJECT_P (XEXP (SET_SRC (x), 0))
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
        return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
         not in a register.  The operation might require this operand in a
         register, so return it as a split point.  We can always do this
         because if the first operand were another operation, we would have
         already found it as a split point.  */
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
        return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
         it is better to write this as (not (ior A B)) so we can split it.
         Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
        {
          SUBST (*loc,
                 gen_rtx_NOT (GET_MODE (x),
                              gen_rtx_fmt_ee (code == IOR ? AND : IOR,
                                              GET_MODE (x),
                                              XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 1), 0))));
          return find_split_point (loc, insn);
        }

      /* Many RISC machines have a large set of logical insns.  If the
         second operand is a NOT, put it first so we will try to split the
         other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
        {
          rtx tem = XEXP (x, 0);
          SUBST (XEXP (x, 0), XEXP (x, 1));
          SUBST (XEXP (x, 1), tem);
        }
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_BITFIELD_OPS:              /* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case RTX_TERNARY:
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_UNARY:
      /* Some machines have (and (shift ...) ...) insns.  If X is not
         an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
        return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
        return split;
      return loc;

    default:
      /* Otherwise, we don't have a split point.  */
      return 0;
    }
}
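
/* Editor's note: a small self-contained sketch (hypothetical helpers,
   not GCC code) of the two extraction forms chosen above for an
   unsigned LEN-bit field at position POS: either one right shift plus
   an AND with the mask (1 << len) - 1, or two shifts when that mask
   constant might be too wide to feed to an AND insn.  Both compute the
   same value, checked here on a 32-bit word.  */
#if 0   /* illustrative sketch only; not part of combine.c */
#include <assert.h>

static unsigned
extract_shift_and (unsigned x, int pos, int len)
{
  return (x >> pos) & ((1u << len) - 1);        /* LSHIFTRT + AND */
}

static unsigned
extract_two_shifts (unsigned x, int pos, int len)
{
  return (x << (32 - len - pos)) >> (32 - len); /* ASHIFT + LSHIFTRT */
}

int
main (void)
{
  unsigned v = 0xDEADBEEFu;
  /* Both forms read the 8-bit field at bit 8: the value 0xBE.  */
  assert (extract_shift_and (v, 8, 8) == 0xBE);
  assert (extract_shift_and (v, 8, 8) == extract_two_shifts (v, 8, 8));
  return 0;
}
#endif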

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too large to record for undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing it
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is nonzero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is nonzero.  */

static rtx
subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  const char *fmt;
  int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)                        \
  ((X) == (Y)                                           \
   || (REG_P (X) && REG_P (Y)   \
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && REG_P (from)
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && OBJECT_P (x))
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
          && XEXP (new, 0) == const0_rtx)
        return new;

      SUBST (XVECEXP (x, 0, 0), new);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
        {
          rtx dest = SET_DEST (XVECEXP (x, 0, i));

          if (!REG_P (dest)
              && GET_CODE (dest) != CC0
              && GET_CODE (dest) != PC)
            {
              new = subst (dest, from, to, 0, unique_copy);

              /* If this substitution failed, this whole thing fails.  */
              if (GET_CODE (new) == CLOBBER
                  && XEXP (new, 0) == const0_rtx)
                return new;

              SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
            }
        }
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
         or PC, so set up to skip this common case.  All other cases
         where we want to suppress replacing something inside a
         SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
          && (REG_P (SET_DEST (x))
              || GET_CODE (SET_DEST (x)) == CC0
              || GET_CODE (SET_DEST (x)) == PC))
        fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
         constant.  */
      if (fmt[0] == 'e')
        op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
        {
          if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                {
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
                    {
                      new = (unique_copy && n_occurrences
                             ? copy_rtx (to) : to);
                      n_occurrences++;
                    }
                  else
                    {
                      new = subst (XVECEXP (x, i, j), from, to, 0,
                                   unique_copy);

                      /* If this substitution failed, this whole thing
                         fails.  */
                      if (GET_CODE (new) == CLOBBER
                          && XEXP (new, 0) == const0_rtx)
                        return new;
                    }

                  SUBST (XVECEXP (x, i, j), new);
                }
            }
          else if (fmt[i] == 'e')
            {
              /* If this is a register being set, ignore it.  */
              new = XEXP (x, i);
              if (in_dest
                  && i == 0
                  && (((code == SUBREG || code == ZERO_EXTRACT)
                       && REG_P (new))
                      || code == STRICT_LOW_PART))
                ;

              else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
                {
                  /* In general, don't install a subreg involving two
4035
                     modes not tieable.  It can worsen register
4036
                     allocation, and can even make invalid reload
4037
                     insns, since the reg inside may need to be copied
4038
                     from in the outside mode, and that may be invalid
4039
                     if it is an fp reg copied in integer mode.
4040
 
4041
                     We allow two exceptions to this: It is valid if
4042
                     it is inside another SUBREG and the mode of that
4043
                     SUBREG and the mode of the inside of TO is
4044
                     tieable and it is valid if X is a SET that copies
4045
                     FROM to CC0.  */
4046
 
4047
                  if (GET_CODE (to) == SUBREG
4048
                      && ! MODES_TIEABLE_P (GET_MODE (to),
4049
                                            GET_MODE (SUBREG_REG (to)))
4050
                      && ! (code == SUBREG
4051
                            && MODES_TIEABLE_P (GET_MODE (x),
4052
                                                GET_MODE (SUBREG_REG (to))))
4053
#ifdef HAVE_cc0
4054
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
4055
#endif
4056
                      )
4057
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4058
 
4059
#ifdef CANNOT_CHANGE_MODE_CLASS
4060
                  if (code == SUBREG
4061
                      && REG_P (to)
4062
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
4063
                      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
4064
                                                   GET_MODE (to),
4065
                                                   GET_MODE (x)))
4066
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4067
#endif
4068
 
4069
                  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
4070
                  n_occurrences++;
4071
                }
4072
              else
4073
                /* If we are in a SET_DEST, suppress most cases unless we
4074
                   have gone inside a MEM, in which case we want to
4075
                   simplify the address.  We assume here that things that
4076
                   are actually part of the destination have their inner
4077
                   parts in the first expression.  This is true for SUBREG,
4078
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4079
                   things aside from REG and MEM that should appear in a
4080
                   SET_DEST.  */
4081
                new = subst (XEXP (x, i), from, to,
4082
                             (((in_dest
4083
                                && (code == SUBREG || code == STRICT_LOW_PART
4084
                                    || code == ZERO_EXTRACT))
4085
                               || code == SET)
4086
                              && i == 0), unique_copy);
4087
 
4088
              /* If we found that we will have to reject this combination,
4089
                 indicate that by returning the CLOBBER ourselves, rather than
4090
                 an expression containing it.  This will speed things up as
4091
                 well as prevent accidents where two CLOBBERs are considered
4092
                 to be equal, thus producing an incorrect simplification.  */
4093
 
4094
              if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
4095
                return new;
4096
 
4097
              if (GET_CODE (x) == SUBREG
4098
                  && (GET_CODE (new) == CONST_INT
4099
                      || GET_CODE (new) == CONST_DOUBLE))
4100
                {
4101
                  enum machine_mode mode = GET_MODE (x);
4102
 
4103
                  x = simplify_subreg (GET_MODE (x), new,
4104
                                       GET_MODE (SUBREG_REG (x)),
4105
                                       SUBREG_BYTE (x));
4106
                  if (! x)
4107
                    x = gen_rtx_CLOBBER (mode, const0_rtx);
4108
                }
4109
              else if (GET_CODE (new) == CONST_INT
4110
                       && GET_CODE (x) == ZERO_EXTEND)
4111
                {
4112
                  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
4113
                                                new, GET_MODE (XEXP (x, 0)));
4114
                  gcc_assert (x);
4115
                }
4116
              else
4117
                SUBST (XEXP (x, i), new);
4118
            }
4119
        }
4120
    }
4121
 
4122
  /* Try to simplify X.  If the simplification changed the code, it is likely
4123
     that further simplification will help, so loop, but limit the number
4124
     of repetitions that will be performed.  */
4125
 
4126
  for (i = 0; i < 4; i++)
4127
    {
4128
      /* If X is sufficiently simple, don't bother trying to do anything
4129
         with it.  */
4130
      if (code != CONST_INT && code != REG && code != CLOBBER)
4131
        x = combine_simplify_rtx (x, op0_mode, in_dest);
4132
 
4133
      if (GET_CODE (x) == code)
4134
        break;
4135
 
4136
      code = GET_CODE (x);
4137
 
4138
      /* We no longer know the original mode of operand 0 since we
         have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
   if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if ((BINARY_P (x)
       && ((!OBJECT_P (XEXP (x, 0))
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
           || (!OBJECT_P (XEXP (x, 1))
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
      || (UNARY_P (x)
          && (!OBJECT_P (XEXP (x, 0))
               && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
          /* If everything is a comparison, what we have is highly unlikely
             to be simpler, so don't use it.  */
          && ! (COMPARISON_P (x)
                && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
        {
          rtx cop1 = const0_rtx;
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

          if (cond_code == NE && COMPARISON_P (cond))
            return x;

          /* Simplify the alternative arms; this may collapse the true and
             false arms to store-flag values.  Be careful to use copy_rtx
             here since true_rtx or false_rtx might share RTL with x as a
             result of the if_then_else_cond call above.  */
          true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
          false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);

          /* If true_rtx and false_rtx are not general_operands, an if_then_else
             is unlikely to be simpler.  */
          if (general_operand (true_rtx, VOIDmode)
              && general_operand (false_rtx, VOIDmode))
            {
              enum rtx_code reversed;

              /* Restarting if we generate a store-flag expression will cause
                 us to loop.  Just drop through in this case.  */

              /* If the result values are STORE_FLAG_VALUE and zero, we can
                 just make the comparison operation.  */
              if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
                x = simplify_gen_relational (cond_code, mode, VOIDmode,
                                             cond, cop1);
              else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_relational (reversed, mode, VOIDmode,
                                             cond, cop1);

              /* Likewise, we can make the negate of a comparison operation
                 if the result values are - STORE_FLAG_VALUE and zero.  */
              else if (GET_CODE (true_rtx) == CONST_INT
                       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
                       && false_rtx == const0_rtx)
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (cond_code,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else if (GET_CODE (false_rtx) == CONST_INT
                       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
                       && true_rtx == const0_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (reversed,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else
                return gen_rtx_IF_THEN_ELSE (mode,
                                             simplify_gen_relational (cond_code,
                                                                      mode,
                                                                      VOIDmode,
                                                                      cond,
                                                                      cop1),
                                             true_rtx, false_rtx);

              code = GET_CODE (x);
              op0_mode = VOIDmode;
            }
        }
    }

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
        if (cmp_mode == VOIDmode)
          {
            cmp_mode = GET_MODE (XEXP (x, 1));
            if (cmp_mode == VOIDmode)
              cmp_mode = op0_mode;
          }
        temp = simplify_relational_operation (code, mode, cmp_mode,
                                              XEXP (x, 0), XEXP (x, 1));
      }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case RTX_BITFIELD_OPS:
    case RTX_TERNARY:
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
                                         XEXP (x, 1), XEXP (x, 2));
      break;
    default:
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
          || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
        {
          rtx other = XEXP (XEXP (x, 0), 0);
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
          rtx inner_op1 = XEXP (x, 1);
          rtx inner;

          /* Make sure we pass the constant operand if any as the second
             one if this is a commutative operation.  */
          if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
            {
              rtx tem = inner_op0;
              inner_op0 = inner_op1;
              inner_op1 = tem;
            }
          inner = simplify_binary_operation (code == MINUS ? PLUS
                                             : code == DIV ? MULT
                                             : code,
                                             mode, inner_op0, inner_op1);

          /* For commutative operations, try the other pair if that one
             didn't simplify.  */
          if (inner == 0 && COMMUTATIVE_ARITH_P (x))
            {
              other = XEXP (XEXP (x, 0), 1);
              inner = simplify_binary_operation (code, mode,
                                                 XEXP (XEXP (x, 0), 0),
                                                 XEXP (x, 1));
            }

          if (inner)
            return simplify_gen_binary (code, mode, other, inner);
        }
    }

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
         address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (SUBREG_REG (x));

      /* See if this can be moved to simplify_subreg.  */
      if (CONSTANT_P (SUBREG_REG (x))
          && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
             /* Don't call gen_lowpart if the inner mode
                is VOIDmode and we cannot simplify it, as SUBREG without
                inner mode is invalid.  */
          && (GET_MODE (SUBREG_REG (x)) != VOIDmode
              || gen_lowpart_common (mode, SUBREG_REG (x))))
        return gen_lowpart (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;
      {
        rtx temp;
        temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
                                SUBREG_BYTE (x));
        if (temp)
          return temp;
      }

      /* Don't change the mode of the MEM if that would change the meaning
         of the address.  */
      if (MEM_P (SUBREG_REG (x))
          && (MEM_VOLATILE_P (SUBREG_REG (x))
              || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
        return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
         we might have been counting on using the fact that some bits were
         zero.  We now do this in the SET.  */

      break;

    case NEG:
      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
         replaced by (lshiftrt X C).  This will convert
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
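      /* As a concrete illustration (taking a 32-bit mode as an example),
         (neg (ashiftrt X 31)) becomes (lshiftrt X 31): both evaluate to 1
         when X is negative and to 0 otherwise.  */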

      if (GET_CODE (temp) == ASHIFTRT
          && GET_CODE (XEXP (temp, 1)) == CONST_INT
          && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
                                     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
         or a SUBREG of one since we'd be making the expression more
         complex if it was just a register.  */
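      /* For example, if bit 3 is the only possibly-nonzero bit of X in a
         32-bit mode, X is either 0 or 8, and (neg X) becomes
         (ashiftrt (ashift X 28) 28): (8 << 28) >> 28, with an arithmetic
         shift, is -8.  */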

      if (!REG_P (temp)
          && ! (GET_CODE (temp) == SUBREG
                && REG_P (SUBREG_REG (temp)))
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
        {
          rtx temp1 = simplify_shift_const
            (NULL_RTX, ASHIFTRT, mode,
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
                                   GET_MODE_BITSIZE (mode) - 1 - i),
             GET_MODE_BITSIZE (mode) - 1 - i);

          /* If all we did was surround TEMP with the two shifts, we
             haven't improved anything, so don't use it.  Otherwise,
             we are better off with TEMP1.  */
          if (GET_CODE (temp1) != ASHIFTRT
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
              || XEXP (XEXP (temp1, 0), 0) != temp)
            return temp1;
        }
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
        break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                    GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                              GET_MODE_MASK (mode), 0));

      /* Similarly to what we do in simplify-rtx.c, a truncate of a register
         whose value is a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
          && (temp = get_last_value (XEXP (x, 0)))
          && COMPARISON_P (temp))
        return gen_lowpart (mode, XEXP (x, 0));
      break;

#ifdef HAVE_cc0
    case COMPARE:
      /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
         using cc0, in which case we want to leave it as a COMPARE
         so we can distinguish it from a register-register-copy.  */
      if (XEXP (x, 1) == const0_rtx)
        return XEXP (x, 0);

      /* x - 0 is the same as x unless x's mode has signed zeros and
         allows rounding towards -infinity.  Under those conditions,
         0 - 0 is -0.  */
      if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0)))
            && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0))))
          && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
        return XEXP (x, 0);
      break;
#endif

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
         returning the inner CONST since CONST can be shared with a
         REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
         can add in an offset.  find_split_point will split this address up
         again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
        return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
         bit-field and can be replaced by either a sign_extend or a
         sign_extract.  The `and' may be a zero_extend and the two
         <c>, -<c> constants may be reversed.  */
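      /* Concretely, for an 8-bit field this matches the familiar
         ((x & 255) ^ 128) - 128 idiom, which sign-extends the low byte
         of x.  */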
4523
      if (GET_CODE (XEXP (x, 0)) == XOR
4524
          && GET_CODE (XEXP (x, 1)) == CONST_INT
4525
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4526
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
4527
          && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4528
              || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4529
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4530
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4531
               && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4532
               && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4533
                   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
4534
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4535
                  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4536
                      == (unsigned int) i + 1))))
4537
        return simplify_shift_const
4538
          (NULL_RTX, ASHIFTRT, mode,
4539
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
4540
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
4541
                                 GET_MODE_BITSIZE (mode) - (i + 1)),
4542
           GET_MODE_BITSIZE (mode) - (i + 1));
4543
 
4544
      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
4545
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
4546
         the bitsize of the mode - 1.  This allows simplification of
4547
         "a = (b & 8) == 0;"  */
4548
      if (XEXP (x, 1) == constm1_rtx
4549
          && !REG_P (XEXP (x, 0))
4550
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4551
                && REG_P (SUBREG_REG (XEXP (x, 0))))
4552
          && nonzero_bits (XEXP (x, 0), mode) == 1)
4553
        return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4554
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
4555
                                 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
4556
                                 GET_MODE_BITSIZE (mode) - 1),
4557
           GET_MODE_BITSIZE (mode) - 1);
4558
 
4559
      /* If we are adding two things that have no bits in common, convert
4560
         the addition into an IOR.  This will often be further simplified,
4561
         for example in cases like ((a & 1) + (a & 2)), which can
4562
         become a & 3.  */
4563
 
4564
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4565
          && (nonzero_bits (XEXP (x, 0), mode)
4566
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
4567
        {
4568
          /* Try to simplify the expression further.  */
4569
          rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
4570
          temp = combine_simplify_rtx (tor, mode, in_dest);
4571
 
4572
          /* If we could, great.  If not, do not go ahead with the IOR
4573
             replacement, since PLUS appears in many special purpose
4574
             address arithmetic instructions.  */
4575
          if (GET_CODE (temp) != CLOBBER && temp != tor)
4576
            return temp;
4577
        }
4578
      break;
4579
 
4580
    case MINUS:
4581
      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4582
         (and <foo> (const_int pow2-1))  */
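      /* E.g. (minus x (and x (const_int -8))) becomes
         (and x (const_int 7)), since x - (x & -8) keeps exactly the low
         three bits of x.  */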
4583
      if (GET_CODE (XEXP (x, 1)) == AND
4584
          && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4585
          && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4586
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
4587
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4588
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
4589
      break;
4590
 
4591
    case MULT:
4592
      /* If we have (mult (plus A B) C), apply the distributive law and then
4593
         the inverse distributive law to see if things simplify.  This
4594
         occurs mostly in addresses, often when unrolling loops.  */
4595
 
4596
      if (GET_CODE (XEXP (x, 0)) == PLUS)
4597
        {
4598
          rtx result = distribute_and_simplify_rtx (x, 0);
4599
          if (result)
4600
            return result;
4601
        }
4602
 
4603
      /* Try simplifying a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations
          && GET_CODE (XEXP (x, 0)) == DIV)
        {
          rtx tem = simplify_binary_operation (MULT, mode,
                                               XEXP (XEXP (x, 0), 0),
                                               XEXP (x, 1));
          if (tem)
            return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
        }
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
         its first operand is a shift.  */
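      /* E.g. (udiv (lshiftrt x 2) 8) becomes
         (lshiftrt (lshiftrt x 2) 3), which simplify_shift_const can
         merge into a single shift.  */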
4618
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
4619
          && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4620
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
4621
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4622
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4623
              || GET_CODE (XEXP (x, 0)) == ROTATE
4624
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
4625
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
4626
      break;
4627
 
4628
    case EQ:  case NE:
4629
    case GT:  case GTU:  case GE:  case GEU:
4630
    case LT:  case LTU:  case LE:  case LEU:
4631
    case UNEQ:  case LTGT:
4632
    case UNGT:  case UNGE:
4633
    case UNLT:  case UNLE:
4634
    case UNORDERED: case ORDERED:
4635
      /* If the first operand is a condition code, we can't do anything
4636
         with it.  */
4637
      if (GET_CODE (XEXP (x, 0)) == COMPARE
4638
          || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4639
              && ! CC0_P (XEXP (x, 0))))
4640
        {
4641
          rtx op0 = XEXP (x, 0);
4642
          rtx op1 = XEXP (x, 1);
4643
          enum rtx_code new_code;
4644
 
4645
          if (GET_CODE (op0) == COMPARE)
4646
            op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4647
 
4648
          /* Simplify our comparison, if possible.  */
4649
          new_code = simplify_comparison (code, &op0, &op1);
4650
 
4651
          /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
4652
             if only the low-order bit is possibly nonzero in X (such as when
4653
             X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
4654
             (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
4655
             known to be either 0 or -1, NE becomes a NEG and EQ becomes
4656
             (plus X 1).
4657
 
4658
             Remove any ZERO_EXTRACT we made when thinking this was a
4659
             comparison.  It may now be simpler to use, e.g., an AND.  If a
4660
             ZERO_EXTRACT is indeed appropriate, it will be placed back by
4661
             the call to make_compound_operation in the SET case.  */
4662
 
4663
          if (STORE_FLAG_VALUE == 1
4664
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4665
              && op1 == const0_rtx
4666
              && mode == GET_MODE (op0)
4667
              && nonzero_bits (op0, mode) == 1)
4668
            return gen_lowpart (mode,
4669
                                expand_compound_operation (op0));
4670
 
4671
          else if (STORE_FLAG_VALUE == 1
4672
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4673
                   && op1 == const0_rtx
4674
                   && mode == GET_MODE (op0)
4675
                   && (num_sign_bit_copies (op0, mode)
4676
                       == GET_MODE_BITSIZE (mode)))
4677
            {
4678
              op0 = expand_compound_operation (op0);
4679
              return simplify_gen_unary (NEG, mode,
4680
                                         gen_lowpart (mode, op0),
4681
                                         mode);
4682
            }
4683
 
4684
          else if (STORE_FLAG_VALUE == 1
4685
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4686
                   && op1 == const0_rtx
4687
                   && mode == GET_MODE (op0)
4688
                   && nonzero_bits (op0, mode) == 1)
4689
            {
4690
              op0 = expand_compound_operation (op0);
4691
              return simplify_gen_binary (XOR, mode,
4692
                                          gen_lowpart (mode, op0),
4693
                                          const1_rtx);
4694
            }
4695
 
4696
          else if (STORE_FLAG_VALUE == 1
4697
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4698
                   && op1 == const0_rtx
4699
                   && mode == GET_MODE (op0)
4700
                   && (num_sign_bit_copies (op0, mode)
4701
                       == GET_MODE_BITSIZE (mode)))
4702
            {
4703
              op0 = expand_compound_operation (op0);
4704
              return plus_constant (gen_lowpart (mode, op0), 1);
4705
            }
4706
 
4707
          /* If STORE_FLAG_VALUE is -1, we have cases similar to
4708
             those above.  */
4709
          if (STORE_FLAG_VALUE == -1
4710
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4711
              && op1 == const0_rtx
4712
              && (num_sign_bit_copies (op0, mode)
4713
                  == GET_MODE_BITSIZE (mode)))
4714
            return gen_lowpart (mode,
4715
                                expand_compound_operation (op0));
4716
 
4717
          else if (STORE_FLAG_VALUE == -1
4718
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4719
                   && op1 == const0_rtx
4720
                   && mode == GET_MODE (op0)
4721
                   && nonzero_bits (op0, mode) == 1)
4722
            {
4723
              op0 = expand_compound_operation (op0);
4724
              return simplify_gen_unary (NEG, mode,
4725
                                         gen_lowpart (mode, op0),
4726
                                         mode);
4727
            }
4728
 
4729
          else if (STORE_FLAG_VALUE == -1
4730
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4731
                   && op1 == const0_rtx
4732
                   && mode == GET_MODE (op0)
4733
                   && (num_sign_bit_copies (op0, mode)
4734
                       == GET_MODE_BITSIZE (mode)))
4735
            {
4736
              op0 = expand_compound_operation (op0);
4737
              return simplify_gen_unary (NOT, mode,
4738
                                         gen_lowpart (mode, op0),
4739
                                         mode);
4740
            }
4741
 
4742
          /* If X is 0/1, (eq X 0) is X-1.  */
4743
          else if (STORE_FLAG_VALUE == -1
4744
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4745
                   && op1 == const0_rtx
4746
                   && mode == GET_MODE (op0)
4747
                   && nonzero_bits (op0, mode) == 1)
4748
            {
4749
              op0 = expand_compound_operation (op0);
4750
              return plus_constant (gen_lowpart (mode, op0), -1);
4751
            }
4752
 
4753
          /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4754
             one bit that might be nonzero, we can convert (ne x 0) to
4755
             (ashift x c) where C puts the bit in the sign bit.  Remove any
4756
             AND with STORE_FLAG_VALUE when we are done, since we are only
4757
             going to test the sign bit.  */
4758
          if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4759
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4760
              && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
4761
                  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4762
              && op1 == const0_rtx
4763
              && mode == GET_MODE (op0)
4764
              && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
4765
            {
4766
              x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4767
                                        expand_compound_operation (op0),
4768
                                        GET_MODE_BITSIZE (mode) - 1 - i);
4769
              if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4770
                return XEXP (x, 0);
4771
              else
4772
                return x;
4773
            }
4774
 
4775
          /* If the code changed, return a whole new comparison.  */
4776
          if (new_code != code)
4777
            return gen_rtx_fmt_ee (new_code, mode, op0, op1);
4778
 
4779
          /* Otherwise, keep this operation, but maybe change its operands.
4780
             This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
4781
          SUBST (XEXP (x, 0), op0);
4782
          SUBST (XEXP (x, 1), op1);
4783
        }
4784
      break;
4785
 
4786
    case IF_THEN_ELSE:
4787
      return simplify_if_then_else (x);
4788
 
4789
    case ZERO_EXTRACT:
4790
    case SIGN_EXTRACT:
4791
    case ZERO_EXTEND:
4792
    case SIGN_EXTEND:
4793
      /* If we are processing SET_DEST, we are done.  */
4794
      if (in_dest)
4795
        return x;
4796
 
4797
      return expand_compound_operation (x);
4798
 
4799
    case SET:
4800
      return simplify_set (x);
4801
 
4802
    case AND:
4803
    case IOR:
4804
      return simplify_logical (x);
4805
 
4806
    case ASHIFT:
4807
    case LSHIFTRT:
4808
    case ASHIFTRT:
4809
    case ROTATE:
4810
    case ROTATERT:
4811
      /* If this is a shift by a constant amount, simplify it.  */
4812
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4813
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
4814
                                     INTVAL (XEXP (x, 1)));
4815
 
4816
      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
4817
        SUBST (XEXP (x, 1),
4818
               force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
4819
                              ((HOST_WIDE_INT) 1
4820
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4821
                              - 1,
4822
                              0));
4823
      break;
4824
 
4825
    default:
4826
      break;
4827
    }
4828
 
4829
  return x;
4830
}
4831
 
4832
/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */
4833
 
4834
static rtx
4835
simplify_if_then_else (rtx x)
4836
{
4837
  enum machine_mode mode = GET_MODE (x);
4838
  rtx cond = XEXP (x, 0);
4839
  rtx true_rtx = XEXP (x, 1);
4840
  rtx false_rtx = XEXP (x, 2);
4841
  enum rtx_code true_code = GET_CODE (cond);
4842
  int comparison_p = COMPARISON_P (cond);
4843
  rtx temp;
4844
  int i;
4845
  enum rtx_code false_code;
4846
  rtx reversed;
4847
 
4848
  /* Simplify storing of the truth value.  */
4849
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
4850
    return simplify_gen_relational (true_code, mode, VOIDmode,
4851
                                    XEXP (cond, 0), XEXP (cond, 1));
4852
 
4853
  /* Also when the truth value has to be reversed.  */
4854
  if (comparison_p
4855
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
4856
      && (reversed = reversed_comparison (cond, mode)))
4857
    return reversed;
4858
 
4859
  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4860
     in it is being compared against certain values.  Get the true and false
4861
     comparisons and see if that says anything about the value of each arm.  */
4862
 
4863
  if (comparison_p
4864
      && ((false_code = reversed_comparison_code (cond, NULL))
4865
          != UNKNOWN)
4866
      && REG_P (XEXP (cond, 0)))
4867
    {
4868
      HOST_WIDE_INT nzb;
4869
      rtx from = XEXP (cond, 0);
4870
      rtx true_val = XEXP (cond, 1);
4871
      rtx false_val = true_val;
4872
      int swapped = 0;
4873
 
4874
      /* If FALSE_CODE is EQ, swap the codes and arms.  */
4875
 
4876
      if (false_code == EQ)
4877
        {
4878
          swapped = 1, true_code = EQ, false_code = NE;
4879
          temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4880
        }
4881
 
4882
      /* If we are comparing against zero and the expression being tested has
4883
         only a single bit that might be nonzero, that is its value when it is
4884
         not equal to zero.  Similarly if it is known to be -1 or 0.  */
4885
 
4886
      if (true_code == EQ && true_val == const0_rtx
4887
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4888
        false_code = EQ, false_val = GEN_INT (nzb);
4889
      else if (true_code == EQ && true_val == const0_rtx
4890
               && (num_sign_bit_copies (from, GET_MODE (from))
4891
                   == GET_MODE_BITSIZE (GET_MODE (from))))
4892
        false_code = EQ, false_val = constm1_rtx;
4893
 
4894
      /* Now simplify an arm if we know the value of the register in the
4895
         branch and it is used in the arm.  Be careful due to the potential
4896
         of locally-shared RTL.  */
4897
 
4898
      if (reg_mentioned_p (from, true_rtx))
4899
        true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
4900
                                      from, true_val),
4901
                      pc_rtx, pc_rtx, 0, 0);
4902
      if (reg_mentioned_p (from, false_rtx))
4903
        false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
4904
                                   from, false_val),
4905
                       pc_rtx, pc_rtx, 0, 0);
4906
 
4907
      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
4908
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
4909
 
4910
      true_rtx = XEXP (x, 1);
4911
      false_rtx = XEXP (x, 2);
4912
      true_code = GET_CODE (cond);
4913
    }
4914
 
4915
  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4916
     reversed, do so to avoid needing two sets of patterns for
4917
     subtract-and-branch insns.  Similarly if we have a constant in the true
4918
     arm, the false arm is the same as the first operand of the comparison, or
4919
     the false arm is more complicated than the true arm.  */
4920
 
4921
  if (comparison_p
4922
      && reversed_comparison_code (cond, NULL) != UNKNOWN
4923
      && (true_rtx == pc_rtx
4924
          || (CONSTANT_P (true_rtx)
4925
              && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx)
4926
          || true_rtx == const0_rtx
4927
          || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
4928
          || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
4929
              && !OBJECT_P (false_rtx))
4930
          || reg_mentioned_p (true_rtx, false_rtx)
4931
          || rtx_equal_p (false_rtx, XEXP (cond, 0))))
4932
    {
4933
      true_code = reversed_comparison_code (cond, NULL);
4934
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
4935
      SUBST (XEXP (x, 1), false_rtx);
4936
      SUBST (XEXP (x, 2), true_rtx);
4937
 
4938
      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
4939
      cond = XEXP (x, 0);
4940
 
4941
      /* It is possible that the conditional has been simplified out.  */
4942
      true_code = GET_CODE (cond);
4943
      comparison_p = COMPARISON_P (cond);
4944
    }
4945
 
4946
  /* If the two arms are identical, we don't need the comparison.  */
4947
 
4948
  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
4949
    return true_rtx;
4950
 
4951
  /* Convert a == b ? b : a to "a".  */
4952
  if (true_code == EQ && ! side_effects_p (cond)
4953
      && !HONOR_NANS (mode)
4954
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
4955
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
4956
    return false_rtx;
4957
  else if (true_code == NE && ! side_effects_p (cond)
4958
           && !HONOR_NANS (mode)
4959
           && rtx_equal_p (XEXP (cond, 0), true_rtx)
4960
           && rtx_equal_p (XEXP (cond, 1), false_rtx))
4961
    return true_rtx;
4962
 
4963
  /* Look for cases where we have (abs x) or (neg (abs X)).  */
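  /* E.g. (if_then_else (ge A 0) A (neg A)) becomes (abs A), and with LT
     or LE in the condition the result is (neg (abs A)) instead.  */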

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && comparison_p
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
        return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
        return
          simplify_gen_unary (NEG, mode,
                              simplify_gen_unary (ABS, mode, true_rtx, mode),
                              mode);
      default:
        break;
      }

  /* Look for MIN or MAX.  */
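  /* E.g. (if_then_else (ge A B) A B) is (smax A B); the unsigned
     comparisons GEU/GTU and LEU/LTU give umax and umin the same way.  */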
4987
 
4988
  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
4989
      && comparison_p
4990
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
4991
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
4992
      && ! side_effects_p (cond))
4993
    switch (true_code)
4994
      {
4995
      case GE:
4996
      case GT:
4997
        return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
4998
      case LE:
4999
      case LT:
5000
        return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
5001
      case GEU:
5002
      case GTU:
5003
        return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
5004
      case LEU:
5005
      case LTU:
5006
        return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
5007
      default:
5008
        break;
5009
      }
5010
 
5011
  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */
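  /* For example, with STORE_FLAG_VALUE == 1,
     (if_then_else COND (plus Z (const_int 4)) Z) can become
     (plus Z (mult COND (const_int 4))): the multiply contributes 4 when
     COND holds and 0 when it does not.  */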

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p
      && GET_MODE_CLASS (mode) == MODE_INT
      && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

      if (z)
        {
          temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
                                                 cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (MULT, m, temp,
                                      simplify_gen_binary (MULT, m, c1,
                                                           const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

          if (extend_op != UNKNOWN)
            temp = simplify_gen_unary (extend_op, mode, temp, m);

          return temp;
        }
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */
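  /* E.g. if A is known to be 0 or 1, (if_then_else (ne A 0) (const_int 8)
     (const_int 0)) is simply (ashift A 3).  */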

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
           && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
          || ((num_sign_bit_copies (XEXP (cond, 0), mode)
               == GET_MODE_BITSIZE (mode))
              && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
                            gen_lowpart (mode, XEXP (cond, 0)), i);

  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8.  */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT
      && GET_MODE (XEXP (cond, 0)) == mode
      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
          == nonzero_bits (XEXP (cond, 0), mode)
      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
    return XEXP (cond, 0);

  return x;
}

/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (rtx x)
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_MODE_CLASS (mode) == MODE_CC
       || GET_CODE (src) == COMPARE
       || CC0_P (dest))
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && COMPARISON_P (*cc_use)
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1, tmp;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
        op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
        op0 = src, op1 = CONST0_RTX (GET_MODE (src));

      tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
                                           op0, op1);
      if (!tmp)
        new_code = old_code;
      else if (!CONSTANT_P (tmp))
        {
          new_code = GET_CODE (tmp);
          op0 = XEXP (tmp, 0);
          op1 = XEXP (tmp, 1);
        }
      else
        {
          rtx pat = PATTERN (other_insn);
          undobuf.other_insn = other_insn;
          SUBST (*cc_use, tmp);

          /* Attempt to simplify CC user.  */
          if (GET_CODE (pat) == SET)
            {
              rtx new = simplify_rtx (SET_SRC (pat));
              if (new != NULL_RTX)
                SUBST (SET_SRC (pat), new);
            }

          /* Convert X into a no-op move.  */
          SUBST (SET_DEST (x), pc_rtx);
          SUBST (SET_SRC (x), pc_rtx);
          return x;
        }

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (new_code, &op0, &op1);

#ifdef SELECT_CC_MODE
      /* If this machine has CC modes other than CCmode, check to see if we
         need to use a different CC mode here.  */
      if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
        compare_mode = GET_MODE (op0);
      else
        compare_mode = SELECT_CC_MODE (new_code, op0, op1);

#ifndef HAVE_cc0
      /* If the mode changed, we have to change SET_DEST, the mode in the
5255
         compare, and the mode in the place SET_DEST is used.  If SET_DEST is
5256
         a hard register, just build new versions with the proper mode.  If it
5257
         is a pseudo, we lose unless it is the only time we set the pseudo, in
         which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
        {
          if (can_change_dest_mode (dest, 0, compare_mode))
            {
              unsigned int regno = REGNO (dest);
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (x), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              other_changed = 1;

              dest = new_dest;
            }
        }
#endif  /* cc0 */
#endif  /* SELECT_CC_MODE */

      /* If the code changed, we have to build a new comparison in
         undobuf.other_insn.  */
      if (new_code != old_code)
        {
          int other_changed_previously = other_changed;
          unsigned HOST_WIDE_INT mask;

          SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
                                          dest, const0_rtx));
          other_changed = 1;

          /* If the only change we made was to change an EQ into an NE or
             vice versa, OP0 has only one bit that might be nonzero, and OP1
             is zero, check if changing the user of the condition code will
             produce a valid insn.  If it won't, we can keep the original code
             in that insn by surrounding our operation with an XOR.  */

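          /* A hedged example of the XOR trick (illustrative only): if
             nonzero_bits (OP0) == 4, rewriting (eq OP0 0) as (ne OP0 0)
             may fail to match.  Restoring EQ and replacing OP0 with
             (xor OP0 4) preserves the meaning, since for OP0 in {0, 4}
             the test (eq (xor OP0 4) 0) is equivalent to (ne OP0 0).  */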
          if (((old_code == NE && new_code == EQ)
               || (old_code == EQ && new_code == NE))
              && ! other_changed_previously && op1 == const0_rtx
              && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
              && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
            {
              rtx pat = PATTERN (other_insn), note = 0;

              if ((recog_for_combine (&pat, other_insn, &note) < 0
                   && ! check_asm_operands (pat)))
                {
                  PUT_CODE (*cc_use, old_code);
                  other_changed = 0;

                  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
                                             op0, GEN_INT (mask));
                }
            }
        }

      if (other_changed)
        undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
         needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = op0;
        }
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
         correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
      else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = SET_SRC (x);
        }
      /* Otherwise, update the COMPARE if needed.  */
      else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
         compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */

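  /* For example (a sketch under the conditions above, not from the
     original comments): with M1 == QImode and M2 == SImode,
       (set (reg:QI D) (subreg:QI (plus:SI A B) 0))
     can become
       (set (subreg:SI (reg:QI D) 0) (plus:SI A B)).  */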
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && !OBJECT_P (SUBREG_REG (src))
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
           / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
               + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
      && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
            && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
                                         GET_MODE (SUBREG_REG (src)),
                                         GET_MODE (src)))
#endif
      && (REG_P (dest)
          || (GET_CODE (dest) == SUBREG
              && REG_P (SUBREG_REG (dest)))))
    {
      SUBST (SET_DEST (x),
             gen_lowpart (GET_MODE (SUBREG_REG (src)),
                                      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef HAVE_cc0
  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
     in SRC.  */
  if (dest == cc0_rtx
      && GET_CODE (src) == SUBREG
      && subreg_lowpart_p (src)
      && (GET_MODE_BITSIZE (GET_MODE (src))
          < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
    {
      rtx inner = SUBREG_REG (src);
      enum machine_mode inner_mode = GET_MODE (inner);

      /* Here we make sure that we don't have a sign bit on.  */
      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (inner, inner_mode)
              < ((unsigned HOST_WIDE_INT) 1
                 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
        {
          SUBST (SET_SRC (x), inner);
          src = SET_SRC (x);
        }
    }
#endif

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */

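  /* A hedged example: on a target whose LOAD_EXTEND_OP for QImode is
     ZERO_EXTEND, (set FOO (subreg:SI (mem:QI ADDR) 0)) would become
     (set FOO (zero_extend:SI (mem:QI ADDR))), matching what the load
     instruction does anyway.  */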
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
      && SUBREG_BYTE (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && MEM_P (SUBREG_REG (src)))
    {
      SUBST (SET_SRC (x),
             gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
                            GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead. Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */

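  /* An illustrative sketch: if M is known to be 0 or -1, then
       (if_then_else (ne M (const_int 0)) (ior X Y) X)
     is handled with TERM1 = X, TERM2 = (and M Y) and TERM3 = 0,
     giving (ior X (and M Y)) with no conditional at all.  */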
  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
                               GET_MODE (XEXP (XEXP (src, 0), 0)))
          == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
                      ? XEXP (src, 1) : XEXP (src, 2));
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
                   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true_rtx) == IOR
          && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
      else if (GET_CODE (true_rtx) == IOR
               && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;

      term2 = simplify_gen_binary (AND, GET_MODE (src),
                                   XEXP (XEXP (src, 0), 0), true_rtx);
      term3 = simplify_gen_binary (AND, GET_MODE (src),
                                   simplify_gen_unary (NOT, GET_MODE (src),
                                                       XEXP (XEXP (src, 0), 0),
                                                       GET_MODE (src)),
                                   false_rtx);

      SUBST (SET_SRC (x),
             simplify_gen_binary (IOR, GET_MODE (src),
                                  simplify_gen_binary (IOR, GET_MODE (src),
                                                       term1, term2),
                                  term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}

/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  */

static rtx
simplify_logical (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* We can call simplify_and_const_int only if we don't lose
         any (sign) bits when converting INTVAL (op1) to
         "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
          && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              || INTVAL (op1) > 0))
        {
          x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
          if (GET_CODE (x) != AND)
            return x;

          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
        }

      /* If we have any of (and (ior A B) C) or (and (xor A B) C),
         apply the distributive law and then the inverse distributive
         law to see if things simplify.  */
      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }
      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    case IOR:
      /* If we have (ior (and A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      if (GET_CODE (op1) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    default:
      gcc_unreachable ();
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */

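/* For example (a sketch, not part of the original comments): on a
   32-bit target, (sign_extract:SI X (const_int 8) (const_int 0))
   expands to (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)),
   and a ZERO_EXTRACT would use LSHIFTRT for the outer shift instead.  */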
static rtx
expand_compound_operation (rtx x)
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
         it depends on implicitly extending the value.
         Since we don't know the right way to extend it,
         we can't tell whether the implicit way is right.

         Even for a mode that is no wider than a const_int,
         we can't win, because we need to sign extend one of its bits through
         the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
        return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
         (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
         because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
         reloaded. If not for that, MEM's would very rarely be safe.

         Reject MODEs bigger than a word, because we might not be able
         to reference a two-register group starting with an arbitrary register
         (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
         is if it is an ASM_OPERANDS), we can't do anything since we don't
         know how much masking to do.  */
      if (len == 0)
        return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;

      /* ... fall through ...  */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || GET_CODE (XEXP (x, 2)) != CONST_INT
          || GET_MODE (XEXP (x, 0)) == VOIDmode)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* This should stay within the object being extracted, fail otherwise.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
        return x;

      if (BITS_BIG_ENDIAN)
        pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                & ~(((unsigned HOST_WIDE_INT)
                      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
                     >> 1))
               == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (rtx_cost (x, SET) > rtx_cost (temp2, SET))
       return temp2;
      else if (rtx_cost (x, SET) > rtx_cost (temp, SET))
       return temp;
      else
       return x;
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
         know that the last value didn't have any inappropriate bits
         set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
         is a comparison and STORE_FLAG_VALUE permits.  This is like
         the first case, but it works even when GET_MODE (x) is larger
         than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && COMPARISON_P (XEXP (XEXP (x, 0), 0))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */

  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth + len >= pos)
    {
      enum machine_mode mode = GET_MODE (x);
      tem = gen_lowpart (mode, XEXP (x, 0));
      if (!tem || GET_CODE (tem) == CLOBBER)
        return x;
      tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                  tem, modewidth - pos - len);
      tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
                                  mode, tem, modewidth - len);
    }
  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
                                  simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                        GET_MODE (x),
                                                        XEXP (x, 0), pos),
                                  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}

/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS). If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */

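/* For example (a sketch, assuming an 8-bit field at bit 8 of R):
   (set (zero_extract:SI R (const_int 8) (const_int 8)) V) becomes
   (set R (ior (and R (not (ashift 255 8)))
               (ashift (and V 255) 8))),
   i.e. clear the field, mask the source, shift it into place, IOR.  */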
static rtx
expand_field_assignment (rtx x)
{
  rtx inner;
  rtx pos;                      /* Always counts from low bit.  */
  int len;
  rtx mask, cleared, masked;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
          && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
        {
          inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
          len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
          pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
        }
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
               && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
        {
          inner = XEXP (SET_DEST (x), 0);
          len = INTVAL (XEXP (SET_DEST (x), 1));
          pos = XEXP (SET_DEST (x), 2);

          /* A constant position should stay within the width of INNER.  */
          if (GET_CODE (pos) == CONST_INT
              && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
            break;

          if (BITS_BIG_ENDIAN)
            {
              if (GET_CODE (pos) == CONST_INT)
                pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
                               - INTVAL (pos));
              else if (GET_CODE (pos) == MINUS
                       && GET_CODE (XEXP (pos, 1)) == CONST_INT
                       && (INTVAL (XEXP (pos, 1))
                           == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
              else
                pos = simplify_gen_binary (MINUS, GET_MODE (pos),
                                           GEN_INT (GET_MODE_BITSIZE (
                                                    GET_MODE (inner))
                                                    - len),
                                           pos);
            }
        }

      /* A SUBREG between two modes that occupy the same numbers of words
         can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
               /* We need SUBREGs to compute nonzero_bits properly.  */
               && nonzero_sign_valid
               && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                        + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
        {
          x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
                           gen_lowpart
                           (GET_MODE (SUBREG_REG (SET_DEST (x))),
                            SET_SRC (x)));
          continue;
        }
      else
        break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
        inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non scalar integer modes.  */
      if (! SCALAR_INT_MODE_P (compute_mode))
        {
          enum machine_mode imode;

          /* Don't do anything for vector or complex integral types.  */
          if (! FLOAT_MODE_P (compute_mode))
            break;

          /* Try to find an integral mode to pun with.  */
          imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
          if (imode == BLKmode)
            break;

          compute_mode = imode;
          inner = gen_lowpart (imode, inner);
        }

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
        break;

      /* Now compute the equivalent expression.  Make a copy of INNER
         for the SET_DEST in case it is a MEM into which we will substitute;
         we don't want shared RTL in that case.  */
      mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      cleared = simplify_gen_binary (AND, compute_mode,
                                     simplify_gen_unary (NOT, compute_mode,
                                       simplify_gen_binary (ASHIFT,
                                                            compute_mode,
                                                            mask, pos),
                                       compute_mode),
                                     inner);
      masked = simplify_gen_binary (ASHIFT, compute_mode,
                                    simplify_gen_binary (
                                      AND, compute_mode,
                                      gen_lowpart (compute_mode, SET_SRC (x)),
                                      mask),
                                    pos);

      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
                       simplify_gen_binary (IOR, compute_mode,
                                            cleared, masked));
    }

  return x;
}

/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   UNSIGNEDP is nonzero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is nonzero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */

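/* A hedged usage sketch: make_extraction (SImode, (reg:SI X), 8,
   NULL_RTX, 8, 1, 0, 0) would typically produce the equivalent of
   (zero_extract:SI (reg:SI X) (const_int 8) (const_int 8)); exactly
   which form comes back depends on the target's extv/extzv/insv
   patterns and on the cost checks below.  */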
static rtx
make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
                 int in_dest, int in_compare)
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
         consider just the QI as the memory to extract from.
         The subreg adds or removes high bits; its mode is
         irrelevant to the meaning of this extraction,
         since POS and LEN count from the lsb.  */
      if (MEM_P (SUBREG_REG (inner)))
        is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }
  else if (GET_CODE (inner) == ASHIFT
           && GET_CODE (XEXP (inner, 1)) == CONST_INT
           && pos_rtx == 0 && pos == 0
           && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
    {
      /* We're extracting the least significant bits of an rtx
         (ashift X (const_int C)), where LEN > C.  Extract the
         least significant (LEN - C) bits of X, giving an rtx
         whose mode is MODE, then shift it left C times.  */
      new = make_extraction (mode, XEXP (inner, 0),
                             0, 0, len - INTVAL (XEXP (inner, 1)),
                             unsignedp, in_dest, in_compare);
      if (new != 0)
        return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1));
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  */

  if (tmode != BLKmode
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
           && !MEM_P (inner)
           && (inner_mode == tmode
               || !REG_P (inner)
               || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
                                         GET_MODE_BITSIZE (inner_mode))
               || reg_truncated_to_mode (tmode, inner))
           && (! in_dest
               || (REG_P (inner)
                   && have_insn_for (STRICT_LOW_PART, tmode))))
          || (MEM_P (inner) && pos_rtx == 0
              && (pos
                  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
                     : BITS_PER_UNIT)) == 0
              /* We can't do this if we are widening INNER_MODE (it
                 may not be aligned, for one thing).  */
              && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
              && (inner_mode == tmode
                  || (! mode_dependent_address_p (XEXP (inner, 0))
                      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
         field.  If the original and current mode are the same, we need not
         adjust the offset.  Otherwise, we do if bytes big endian.

         If INNER is not a MEM, get a piece consisting of just the field
         of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (MEM_P (inner))
        {
          HOST_WIDE_INT offset;

          /* POS counts from lsb, but make OFFSET count in memory order.  */
          if (BYTES_BIG_ENDIAN)
            offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
          else
            offset = pos / BITS_PER_UNIT;

          new = adjust_address_nv (inner, tmode, offset);
        }
      else if (REG_P (inner))
        {
          if (tmode != inner_mode)
            {
              /* We can't call gen_lowpart in a DEST since we
                 always want a SUBREG (see below) and it would sometimes
                 return a new hard register.  */
              if (pos || in_dest)
                {
                  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

                  if (WORDS_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
                    final_word = ((GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode))
                                  / UNITS_PER_WORD) - final_word;

                  final_word *= UNITS_PER_WORD;
                  if (BYTES_BIG_ENDIAN &&
                      GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
                    final_word += (GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

                  /* Avoid creating invalid subregs, for example when
                     simplifying (x>>32)&255.  */
                  if (!validate_subreg (tmode, inner_mode, inner, final_word))
                    return NULL_RTX;

                  new = gen_rtx_SUBREG (tmode, inner, final_word);
                }
              else
                new = gen_lowpart (tmode, inner);
            }
          else
            new = inner;
        }
      else
        new = force_to_mode (inner, tmode,
                             len >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                             0);

      /* If this extraction is going into the destination of a SET,
         make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
        return (MEM_P (new) ? new
                : (GET_CODE (new) != SUBREG
                   ? gen_rtx_CLOBBER (tmode, const0_rtx)
                   : gen_rtx_STRICT_LOW_PART (VOIDmode, new)));

      if (mode == tmode)
        return new;

      if (GET_CODE (new) == CONST_INT)
        return gen_int_mode (INTVAL (new), mode);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         sign and zero extension, that are equivalent in these cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (new, tmode)
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (tmode))
                       >> 1))
                  == 0)))
        {
          rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
          rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET))
            return temp;
          return temp1;
        }

      /* Otherwise, sign- or zero-extend unless we already are in the
         proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                             mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && unsignedp)
    return 0;

  /* If INNER is a MEM, reject this if we would be spanning bytes or
     if the position is not a constant and the length is not 1.  In all
     other cases, we would only be going outside our object in cases when
     an original shift would have been undefined.  */
  if (MEM_P (inner)
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
          || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is the preferred mode
     for an extraction pattern's first input operand, or word_mode if there
     is none.  */
  if (!MEM_P (inner))
    wanted_inner_mode = wanted_inner_reg_mode;
  else
    {
      /* Be careful not to go beyond the extracted object and maintain the
         natural alignment of the memory.  */
      wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
      while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
             > GET_MODE_BITSIZE (wanted_inner_mode))
        {
          wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
          gcc_assert (wanted_inner_mode != VOIDmode);
        }

      /* If we have to change the mode of memory and cannot, the desired mode
         is EXTRACTION_MODE.  */
      if (inner_mode != wanted_inner_mode
          && (mode_dependent_address_p (XEXP (inner, 0))
              || MEM_VOLATILE_P (inner)
              || pos_rtx))
        wanted_inner_mode = extraction_mode;
    }

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
         BITS_BIG_ENDIAN style.  If position is constant, compute new
         position.  Otherwise, build subtraction.
         Note that POS is relative to the mode of the original argument.
         If it's a MEM we need to recompute POS relative to that.
         However, if we're extracting from (or inserting into) a register,
         we want to recompute POS relative to wanted_inner_mode.  */
      int width = (MEM_P (inner)
                   ? GET_MODE_BITSIZE (is_mode)
                   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
        pos = width - len - pos;
      else
        pos_rtx
          = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
         Note that it can only be less than 0 if !MEM_P (inner).  */
    }

  /* If INNER has a wider mode, and this is a constant extraction, try to
     make it smaller and adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && inner_mode != wanted_inner_mode
      && ! pos_rtx
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && MEM_P (inner)
      && ! mode_dependent_address_p (XEXP (inner, 0))
      && ! MEM_VOLATILE_P (inner))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
         endian in both bits and bytes or little endian in bits and bytes.
         If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
         adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* We can now move to the desired byte.  */
      offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
                * GET_MODE_SIZE (wanted_inner_mode);
      pos %= GET_MODE_BITSIZE (wanted_inner_mode);

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
          && is_mode != wanted_inner_mode)
        offset = (GET_MODE_SIZE (is_mode)
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (!MEM_P (inner))
    {
      if (GET_MODE (inner) != wanted_inner_mode
          && (pos_rtx != 0
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
        return 0;

      if (orig_pos < 0)
        return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
                             pos_rtx
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
                                << orig_pos),
                             0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert extraction to cheaper one - either
         SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
         cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (GET_MODE (pos_rtx)))
                       >> 1))
                  == 0)))
        {
          rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
            temp = temp1;
        }
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart (mode, new);

  return new;
}

/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */

static rtx
extract_left_shift (rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_binary (code, mode, tem,
                                    GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

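/* For example (a sketch): under IN_CODE == SET,
   (and:SI (lshiftrt:SI X (const_int 8)) (const_int 255))
   is recognized as a field reference and rebuilt as the equivalent of
   (zero_extract:SI X (const_int 8) (const_int 8)); inside a MEM
   address, (ashift X (const_int 2)) becomes (mult X (const_int 4)).  */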
static rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
               : ((code == COMPARE || COMPARISON_P (x))
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          new = make_compound_operation (XEXP (x, 0), next_code);
          new = gen_rtx_MULT (mode, new,
                              GEN_INT ((HOST_WIDE_INT) 1
                                       << INTVAL (XEXP (x, 1))));
        }
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                             XEXP (x, 1)),
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                             XEXP (x, 1)));
          new = make_compound_operation (new, in_code);
        }

      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */
6498
 
6499
      else if (GET_CODE (XEXP (x, 0)) == ROTATE
6500
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6501
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6502
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
6503
        {
6504
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6505
          new = make_extraction (mode, new,
6506
                                 (GET_MODE_BITSIZE (mode)
6507
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
6508
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
6509
        }
6510
 
6511
      /* On machines without logical shifts, if the operand of the AND is
6512
         a logical shift and our mask turns off all the propagated sign
6513
         bits, we can replace the logical shift with an arithmetic shift.  */
6514
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6515
               && !have_insn_for (LSHIFTRT, mode)
6516
               && have_insn_for (ASHIFTRT, mode)
6517
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6518
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6519
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6520
               && mode_width <= HOST_BITS_PER_WIDE_INT)
6521
        {
6522
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6523
 
6524
          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6525
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6526
            SUBST (XEXP (x, 0),
6527
                   gen_rtx_ASHIFTRT (mode,
6528
                                     make_compound_operation
6529
                                     (XEXP (XEXP (x, 0), 0), next_code),
6530
                                     XEXP (XEXP (x, 0), 1)));
6531
        }
6532
 
6533
      /* If the constant is one less than a power of two, this might be
6534
         representable by an extraction even if no shift is present.
6535
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6536
         we are in a COMPARE.  */
6537
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
6538
        new = make_extraction (mode,
6539
                               make_compound_operation (XEXP (x, 0),
6540
                                                        next_code),
6541
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);
6542
 
6543
      /* If we are in a comparison and this is an AND with a power of two,
6544
         convert this into the appropriate bit extract.  */
6545
      else if (in_code == COMPARE
6546
               && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
6547
        new = make_extraction (mode,
6548
                               make_compound_operation (XEXP (x, 0),
6549
                                                        next_code),
6550
                               i, NULL_RTX, 1, 1, 0, 1);
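      /* Added commentary: in a comparison context, (and FOO 2^i) tests a
         single bit, so e.g. (and FOO 8) compared against zero becomes a
         one-bit extraction at position 3 -- the C expression
         (foo >> 3) & 1.  */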

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
          && ! have_insn_for (LSHIFTRT, mode)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
        {
          new = gen_rtx_ASHIFTRT (mode,
                                  make_compound_operation (XEXP (x, 0),
                                                           next_code),
                                  XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
          && GET_CODE (lhs) == ASHIFT
          && GET_CODE (XEXP (lhs, 1)) == CONST_INT
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
        {
          new = make_compound_operation (XEXP (lhs, 0), next_code);
          new = make_extraction (mode, new,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
          break;
        }
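      /* Added worked example (commentary, not original code): in a 32-bit
         mode, (ashiftrt (ashift FOO 24) 28) -- the C idiom
         (foo << 24) >> 28 on a signed int -- keeps 32 - 28 = 4 bits
         starting at bit 28 - 24 = 4, so it is rewritten as
         (sign_extract FOO 4 4).  */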

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
          && ! (GET_CODE (lhs) == SUBREG
                && (OBJECT_P (SUBREG_REG (lhs))))
          && GET_CODE (rhs) == CONST_INT
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new = make_extraction (mode, make_compound_operation (new, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);

      {
        rtx simplified;
        simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
                                      SUBREG_BYTE (x));

        if (simplified)
          tem = simplified;

        if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
            && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
            && subreg_lowpart_p (x))
          {
            rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
                                       0);

            /* If we have something other than a SUBREG, we might have
               done an expansion, so rerun ourselves.  */
            if (GET_CODE (newer) != SUBREG)
              newer = make_compound_operation (newer, in_code);

            return newer;
          }

        if (simplified)
          return tem;
      }
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new);
      }

  /* If this is a commutative operation, the changes to the operands
     may have made it noncanonical.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      tem = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), tem);
    }

  return x;
}
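
/* Added illustrative sketch (not part of the pass): the two basic
   equivalences make_compound_operation rebuilds, shown as plain C on a
   32-bit type.  The shift-and-mask forms on the left are what the
   combiner sees; the compound extract forms are what it produces.
   The demo_* names are hypothetical.  */

static unsigned int
demo_zero_extract (unsigned int foo, int pos, int len)
{
  /* (and (lshiftrt FOO POS) (2^LEN - 1)) == (zero_extract FOO LEN POS).  */
  return (foo >> pos) & ((1u << len) - 1);
}

static int
demo_sign_extract (int foo, int pos, int len)
{
  /* (ashiftrt (ashift FOO (32 - POS - LEN)) (32 - LEN))
     == (sign_extract FOO LEN POS), assuming 32-bit int and the usual
     arithmetic behavior of >> on signed values.  */
  return (int) ((unsigned int) foo << (32 - pos - len)) >> (32 - len);
}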

/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */

static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}
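
/* Added illustrative sketch (not part of the pass): the same
   decomposition on a plain unsigned int.  "m & -m" isolates the lowest
   set bit, giving the field position; the shifted mask plus one is a
   power of two exactly when the set bits are contiguous, and its log
   is the field length.  For m = 0x70, m & -m = 0x10 gives pos = 4 and
   (m >> 4) + 1 = 8 = 2^3 gives len = 3; a mask with a hole, such as
   0x50, fails the second test.  The demo_* names are hypothetical.  */

static int
demo_exact_log2 (unsigned int v)
{
  int log;

  if (v == 0 || (v & (v - 1)) != 0)
    return -1;                  /* Not a power of two.  */
  for (log = 0; (v >>= 1) != 0; log++)
    ;
  return log;
}

static int
demo_get_pos_from_mask (unsigned int m, int *plen)
{
  int pos = demo_exact_log2 (m & -m);
  int len = 0;

  if (pos >= 0)
    len = demo_exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}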

/* If X refers to a register that equals REG in value, replace these
   references with REG.  */
static rtx
canon_reg_for_combine (rtx x, rtx reg)
{
  rtx op0, op1, op2;
  const char *fmt;
  int i;
  bool copied;

  enum rtx_code code = GET_CODE (x);
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      if (op0 != XEXP (x, 0))
        return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
                                   GET_MODE (reg));
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
                                        GET_MODE (op0), op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
        return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
                                     GET_MODE (op0), op0, op1, op2);

    case RTX_OBJ:
      if (REG_P (x))
        {
          if (rtx_equal_p (get_last_value (reg), x)
              || rtx_equal_p (reg, get_last_value (x)))
            return reg;
          else
            break;
        }

      /* fall through */

    default:
      fmt = GET_RTX_FORMAT (code);
      copied = false;
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (fmt[i] == 'e')
          {
            rtx op = canon_reg_for_combine (XEXP (x, i), reg);
            if (op != XEXP (x, i))
              {
                if (!copied)
                  {
                    copied = true;
                    x = copy_rtx (x);
                  }
                XEXP (x, i) = op;
              }
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              {
                rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
                if (op != XVECEXP (x, i, j))
                  {
                    if (!copied)
                      {
                        copied = true;
                        x = copy_rtx (x);
                      }
                    XVECEXP (x, i, j) = op;
                  }
              }
          }

      break;
    }

  return x;
}

/* Return X converted to MODE.  If the value is already truncated to
   MODE we can just return a subreg even though in the general case we
   would need an explicit truncation.  */

static rtx
gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
{
  if (GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (mode)
      || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                GET_MODE_BITSIZE (GET_MODE (x)))
      || (REG_P (x) && reg_truncated_to_mode (mode, x)))
    return gen_lowpart (mode, x);
  else
    return simplify_gen_unary (TRUNCATE, mode, x, GET_MODE (x));
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */

static rtx
force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && have_insn_for (code, mode))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
  else
    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
                   - 1);
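
  /* Added worked example (commentary, not original code): FULLER_MASK
     extends MASK down to bit 0.  For MASK = 0x30 (bits 4 and 5),
     floor_log2 (0x30) = 5, so fuller_mask = (1 << 6) - 1 = 0x3f: a
     carry out of bits 0-3 of an addition can change bits 4 and 5, so
     those low bits must be kept too.  */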

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
    x = const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      if (SCALAR_INT_MODE_P (mode))
        return gen_int_mode (INTVAL (x) & mask, mode);
      else
        {
          x = GEN_INT (INTVAL (x) & mask);
          return gen_lowpart_common (mode, x);
        }
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart (mode, x);

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, next_select);
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
          /* We can ignore the effect of this SUBREG if it narrows the mode or
             if the constant masks to zero all the bits the mode doesn't
             have.  */
          && ((GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
              || (0 == (mask
                        & GET_MODE_MASK (GET_MODE (x))
                        & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
        return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
                  == mask))
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = simplify_gen_binary (AND, GET_MODE (x),
                                       XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET) < rtx_cost (x, SET))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */

      {
        unsigned int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && exact_log2 (- smask) >= 0
            && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
            && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
          return force_to_mode (plus_constant (XEXP (x, 0),
                                               (INTVAL (XEXP (x, 1)) & smask)),
                                mode, smask, next_select);
      }
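      /* Added worked example (commentary, not original code): with
         M = -16 (an alignment mask), FOO 16-byte aligned and C1 = 20,
         only C1's aligned part survives the outer AND, so C1 may be
         masked to 16.  In C: (foo + 20) & -16 == (foo + 16) & -16
         whenever (foo & 15) == 0.  */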

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
         in the mask, then we may replace with (neg Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
                                         & -INTVAL (XEXP (x, 0))))
              > mask))
        {
          x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
                                  GET_MODE (x));
          return force_to_mode (x, mode, mask, next_select);
        }

      /* Similarly, if C contains every bit in the fuller_mask, then we may
         replace with (not Y).  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
              == INTVAL (XEXP (x, 0))))
        {
          x = simplify_gen_unary (NOT, GET_MODE (x),
                                  XEXP (x, 1), GET_MODE (x));
          return force_to_mode (x, mode, mask, next_select);
        }

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
                                   XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, next_select);
        }
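      /* Added commentary: this rewrite uses the identity
         ((foo >> c1) | c2) == ((foo | (c2 << c1)) >> c1), which holds
         for a logical shift whenever c2 << c1 still fits in the mode
         (the floor_log2 test above); likewise for XOR.  E.g.
         ((foo >> 4) | 3) == ((foo | 0x30) >> 4) on unsigned values.  */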

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    next_select));
      op1 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 1), mode, mask,
                                                    next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);
          unsigned HOST_WIDE_INT inner_mask;

          /* Select the mask of the bits we need for the shift operand.  */
          inner_mask = mask << INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and INNER_MASK is no wider than the
             width of X's mode.  */
          if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, inner_mask, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          /* The shift puts one of the sign bit copies in the least significant
             bit.  */
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_BITSIZE (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          /* Number of bits left after the shift must be more than the mask
             needs.  */
          && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)))
          /* Must be more sign bit copies than the mask needs.  */
          && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                                 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
                                          - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((unsigned HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~(HOST_WIDE_INT) 0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~nonzero) == 0)
            {
              x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
                                        XEXP (x, 0), INTVAL (XEXP (x, 1)));
              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, next_select);
            }

          else if ((i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                  (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                   GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, next_select);
            }
        }

      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                 XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && GET_CODE (temp) == CONST_INT)
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
                               GET_MODE (x));
          temp = simplify_gen_binary (XOR, GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                   temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, next_select);
        }

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;
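      /* Added commentary: under MASK, (not FOO) behaves like
         (xor FOO MASK), so for FOO = (lshiftrt BAR C) the commuting
         trick above is the identity
         (~(bar >> c)) & m == ((bar ^ (m << c)) >> c) & m.  */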

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && GET_MODE (XEXP (x, 0)) == mode
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && (nonzero_bits (XEXP (x, 0), mode)
              == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart_or_truncate (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_or_truncate (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_or_truncate (mode, x);
}

/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */

static rtx
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && XEXP (x, 1) == const0_rtx)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (UNARY_P (x)
           && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
                                    GET_MODE (XEXP (x, 0)));
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (BINARY_P (x))
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          /* If if_then_else_cond returned zero, then true/false are the
             same rtl.  We must copy one of them to prevent invalid rtl
             sharing.  */
          if (cond0 == 0)
            true0 = copy_rtx (true0);
          else if (cond1 == 0)
            true1 = copy_rtx (true1);

          if (COMPARISON_P (x))
            {
              *ptrue = simplify_gen_relational (code, mode, VOIDmode,
                                                true0, true1);
              *pfalse = simplify_gen_relational (code, mode, VOIDmode,
                                                 false0, false1);
            }
          else
            {
              *ptrue = simplify_gen_binary (code, mode, true0, true1);
              *pfalse = simplify_gen_binary (code, mode, false0, false1);
            }

          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is nonzero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = simplify_gen_binary (MULT, mode,
                                             (code == MINUS
                                              ? simplify_gen_unary (NEG, mode,
                                                                    op1, mode)
                                              : op1),
                                              const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a SUBREG, we can narrow both the true and false values
     of the inner expression, if there is a condition.  */
  else if (code == SUBREG
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      true0 = simplify_gen_subreg (mode, true0,
                                   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      false0 = simplify_gen_subreg (mode, false0,
                                    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      if (true0 && false0)
        {
          *ptrue = true0;
          *pfalse = false0;
          return cond0;
        }
    }

  /* If X is a constant, this isn't special and will cause confusion
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
     will be least confusing to the rest of the compiler.  */
  else if (mode == BImode)
    {
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
      return x;
    }

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (x == constm1_rtx || x == const0_rtx
           || (mode != VOIDmode
               && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (SCALAR_INT_MODE_P (mode)
           && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
           && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
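
/* Added illustrative sketch (not part of the pass): the PLUS-of-MULTs
   shape recognized above is how a conditional select looks when each
   arm has been multiplied by a comparison result and STORE_FLAG_VALUE
   is 1.  On plain C ints, exactly one of the two products is live.
   The demo_* name is hypothetical.  */

static int
demo_two_valued_select (int c, int a, int b)
{
  /* (plus (mult (ne c 0) a) (mult (eq c 0) b)) == (c != 0 ? a : b),
     since one comparison yields 1 and the other 0.  */
  return (c != 0) * a + (c == 0) * b;
}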
7566
 
7567
/* Return the value of expression X given the fact that condition COND
7568
   is known to be true when applied to REG as its first operand and VAL
7569
   as its second.  X is known to not be shared and so can be modified in
7570
   place.
7571
 
7572
   We only handle the simplest cases, and specifically those cases that
7573
   arise with IF_THEN_ELSE expressions.  */
7574
 
7575
static rtx
7576
known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
7577
{
7578
  enum rtx_code code = GET_CODE (x);
7579
  rtx temp;
7580
  const char *fmt;
7581
  int i, j;
7582
 
7583
  if (side_effects_p (x))
7584
    return x;
7585
 
7586
  /* If either operand of the condition is a floating point value,
7587
     then we have to avoid collapsing an EQ comparison.  */
7588
  if (cond == EQ
7589
      && rtx_equal_p (x, reg)
7590
      && ! FLOAT_MODE_P (GET_MODE (x))
7591
      && ! FLOAT_MODE_P (GET_MODE (val)))
7592
    return val;
7593
 
7594
  if (cond == UNEQ && rtx_equal_p (x, reg))
7595
    return val;
7596
 
7597
  /* If X is (abs REG) and we know something about REG's relationship
7598
     with zero, we may be able to simplify this.  */
7599
 
7600
  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7601
    switch (cond)
7602
      {
7603
      case GE:  case GT:  case EQ:
7604
        return XEXP (x, 0);
7605
      case LT:  case LE:
7606
        return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
7607
                                   XEXP (x, 0),
7608
                                   GET_MODE (XEXP (x, 0)));
7609
      default:
7610
        break;
7611
      }
7612
 
7613
  /* The only other cases we handle are MIN, MAX, and comparisons if the
7614
     operands are the same as REG and VAL.  */
7615
 
7616
  else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
7617
    {
7618
      if (rtx_equal_p (XEXP (x, 0), val))
7619
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7620
 
7621
      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7622
        {
7623
          if (COMPARISON_P (x))
7624
            {
7625
              if (comparison_dominates_p (cond, code))
7626
                return const_true_rtx;
7627
 
7628
              code = reversed_comparison_code (x, NULL);
7629
              if (code != UNKNOWN
7630
                  && comparison_dominates_p (cond, code))
7631
                return const0_rtx;
7632
              else
7633
                return x;
7634
            }
7635
          else if (code == SMAX || code == SMIN
7636
                   || code == UMIN || code == UMAX)
7637
            {
7638
              int unsignedp = (code == UMIN || code == UMAX);
7639
 
7640
              /* Do not reverse the condition when it is NE or EQ.
7641
                 This is because we cannot conclude anything about
7642
                 the value of 'SMAX (x, y)' when x is not equal to y,
7643
                 but we can when x equals y.  */
7644
              if ((code == SMAX || code == UMAX)
7645
                  && ! (cond == EQ || cond == NE))
7646
                cond = reverse_condition (cond);
7647
 
7648
              switch (cond)
7649
                {
7650
                case GE:   case GT:
7651
                  return unsignedp ? x : XEXP (x, 1);
7652
                case LE:   case LT:
7653
                  return unsignedp ? x : XEXP (x, 0);
7654
                case GEU:  case GTU:
7655
                  return unsignedp ? XEXP (x, 1) : x;
7656
                case LEU:  case LTU:
7657
                  return unsignedp ? XEXP (x, 0) : x;
7658
                default:
7659
                  break;
7660
                }
7661
            }
7662
        }
7663
    }
7664
  else if (code == SUBREG)
7665
    {
7666
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
7667
      rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val);
7668
 
7669
      if (SUBREG_REG (x) != r)
7670
        {
7671
          /* We must simplify subreg here, before we lose track of the
7672
             original inner_mode.  */
7673
          new = simplify_subreg (GET_MODE (x), r,
7674
                                 inner_mode, SUBREG_BYTE (x));
7675
          if (new)
7676
            return new;
7677
          else
7678
            SUBST (SUBREG_REG (x), r);
7679
        }
7680
 
7681
      return x;
7682
    }
7683
  /* We don't have to handle SIGN_EXTEND here, because even in the
7684
     case of replacing something with a modeless CONST_INT, a
7685
     CONST_INT is already (supposed to be) a valid sign extension for
7686
     its narrower mode, which implies it's already properly
7687
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
7688
     story is different.  */
7689
  else if (code == ZERO_EXTEND)
7690
    {
7691
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
7692
      rtx new, r = known_cond (XEXP (x, 0), cond, reg, val);
7693
 
7694
      if (XEXP (x, 0) != r)
7695
        {
7696
          /* We must simplify the zero_extend here, before we lose
7697
             track of the original inner_mode.  */
7698
          new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
7699
                                          r, inner_mode);
7700
          if (new)
7701
            return new;
7702
          else
7703
            SUBST (XEXP (x, 0), r);
7704
        }
7705
 
7706
      return x;
7707
    }
7708
 
7709
  fmt = GET_RTX_FORMAT (code);
7710
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7711
    {
7712
      if (fmt[i] == 'e')
7713
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7714
      else if (fmt[i] == 'E')
7715
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7716
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7717
                                                cond, reg, val));
7718
    }
7719
 
7720
  return x;
7721
}

/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (rtx x, rtx y)
{
  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (MEM_P (x) && GET_CODE (y) == SUBREG
      && MEM_P (SUBREG_REG (y))
      && rtx_equal_p (SUBREG_REG (y),
                      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (MEM_P (y) && GET_CODE (x) == SUBREG
      && MEM_P (SUBREG_REG (x))
      && rtx_equal_p (SUBREG_REG (x),
                      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  /* We used to see if get_last_value of X and Y were the same but that's
     not correct.  In one direction, we'll cause the assignment to have
     the wrong destination and in the other, we'll import a register into
     this insn that might already have been dead.  So fail if none of the
     above cases are true.  */
  return 0;
}
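
/* Added illustrative sketch (not part of the pass): the one-bit source
   shapes make_field_assignment recognizes below, written as plain C on
   an unsigned int.  Each becomes an assignment of 0 or 1 to a
   (zero_extract DEST 1 POS).  The demo_* names are hypothetical.  */

static unsigned int
demo_clear_bit (unsigned int dest, int pos)
{
  /* (and (rotate (const_int -2) POS) DEST): rotating ...11110 left by
     POS places the single zero at bit POS, clearing just that bit.  */
  return dest & ~(1u << pos);
}

static unsigned int
demo_set_bit (unsigned int dest, int pos)
{
  /* (ior (ashift (const_int 1) POS) DEST) sets just bit POS.  */
  return dest | (1u << pos);
}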

/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (rtx x)
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  HOST_WIDE_INT pos;
  unsigned HOST_WIDE_INT len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
      && subreg_lowpart_p (XEXP (src, 0))
      && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
      && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
      && GET_CODE (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == CONST_INT
      && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
      && XEXP (XEXP (src, 0), 0) == const1_rtx
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }

  /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
     SRC is an AND with all bits of that field set, then we can discard
     the AND.  */
  if (GET_CODE (dest) == ZERO_EXTRACT
      && GET_CODE (XEXP (dest, 1)) == CONST_INT
      && GET_CODE (src) == AND
      && GET_CODE (XEXP (src, 1)) == CONST_INT)
    {
      HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
      unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
      unsigned HOST_WIDE_INT ze_mask;

      if (width >= HOST_BITS_PER_WIDE_INT)
        ze_mask = -1;
      else
        ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;

      /* Complete overlap.  We can remove the source AND.  */
      if ((and_mask & ze_mask) == ze_mask)
        return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));

      /* Partial overlap.  We can reduce the source AND.  */
      if ((and_mask & ze_mask) != and_mask)
        {
          mode = GET_MODE (src);
          src = gen_rtx_AND (mode, XEXP (src, 0),
                             gen_int_mode (and_mask & ze_mask, mode));
          return gen_rtx_SET (VOIDmode, dest, src);
        }
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
           && GET_CODE (XEXP (lhs, 1)) == CONST_INT
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                     GET_MODE (src),
                                                     other, pos),
                               dest);
  src = force_to_mode (src, mode,
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
                       ? ~(unsigned HOST_WIDE_INT) 0
                       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                       0);

  /* If SRC is masked by an AND that does not make a difference in
     the value being stored, strip it.  */
  if (GET_CODE (assign) == ZERO_EXTRACT
      && GET_CODE (XEXP (assign, 1)) == CONST_INT
      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
      && GET_CODE (src) == AND
      && GET_CODE (XEXP (src, 1)) == CONST_INT
      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
          == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
    src = XEXP (src, 0);

  return gen_rtx_SET (VOIDmode, assign, src);
7919
}
7920
 
7921
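/* As an illustration of the one-bit cases above (a sketch, not taken
   from real compiler output): a source-level store such as "x |= 1 << n"
   reaches this function as

       (set (reg X) (ior:SI (ashift:SI (const_int 1) (reg N)) (reg X)))

   and is rewritten into the field assignment

       (set (zero_extract:SI (reg X) (const_int 1) (reg N)) (const_int 1))

   while "x &= ~(1 << n)", already canonicalized into the ROTATE form
   noted above, becomes the same extraction set to (const_int 0).  */
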
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */

static rtx
apply_distributive_law (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  enum rtx_code inner_code;
  rtx lhs, rhs, other;
  rtx tem;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0);
  rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (OBJECT_P (lhs) || OBJECT_P (rhs))
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
        return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
        return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations,
         provided the inner modes and byte offsets are the same, this
         is an extraction of a low-order part, we don't convert an fp
         operation to int or vice versa, this is not a vector mode,
         and we would not be converting a single-word operation into a
         multi-word operation.  The latter test is not required, but
         it prevents generating unneeded multi-word operations.  Some
         of the previous tests are redundant given the latter test,
         but are retained because they are required for correctness.

         We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
          || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
          || ! subreg_lowpart_p (lhs)
          || (GET_MODE_CLASS (GET_MODE (lhs))
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
          || (GET_MODE_SIZE (GET_MODE (lhs))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
          || VECTOR_MODE_P (GET_MODE (lhs))
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
          /* Result might need to be truncated.  Don't change mode if
             explicit truncation is needed.  */
          || !TRULY_NOOP_TRUNCATION
               (GET_MODE_BITSIZE (GET_MODE (x)),
                GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
        return x;

      tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
                                 SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (COMMUTATIVE_ARITH_P (lhs)
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (COMMUTATIVE_ARITH_P (lhs)
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (COMMUTATIVE_ARITH_P (lhs)
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return simplify_gen_binary (inner_code, GET_MODE (x),
                              apply_distributive_law (tem), other);
}

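/* A worked example (a sketch, not from real compiler output): given

       (plus:SI (mult:SI (reg A) (reg C)) (mult:SI (reg B) (reg C)))

   the inner MULTs distribute over the outer PLUS, so the result is

       (mult:SI (plus:SI (reg A) (reg B)) (reg C))

   which needs one multiplication instead of two.  */
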
/* See if X is of the form (* (+ A B) C), and if so convert to
   (+ (* A C) (* B C)) and try to simplify.

   Most of the time, this results in no change.  However, if some of
   the operands are the same or inverses of each other, simplifications
   will result.

   For example, (and (ior A B) (not B)) can occur as the result of
   expanding a bit field assignment.  When we apply the distributive
   law to this, we get (ior (and A (not B)) (and B (not B))),
   which then simplifies to (and A (not B)).

   Note that no checks happen on the validity of applying the inverse
   distributive law.  Checking here would be pointless, since we can
   do it in the few places where this routine is called.

   N is the index of the term that is decomposed (the arithmetic operation,
   i.e. (+ A B) in the first example above).  !N is the index of the term that
   is distributed, i.e. of C in the first example above.  */
static rtx
distribute_and_simplify_rtx (rtx x, int n)
{
  enum machine_mode mode;
  enum rtx_code outer_code, inner_code;
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;

  decomposed = XEXP (x, n);
  if (!ARITHMETIC_P (decomposed))
    return NULL_RTX;

  mode = GET_MODE (x);
  outer_code = GET_CODE (x);
  distributed = XEXP (x, !n);

  inner_code = GET_CODE (decomposed);
  inner_op0 = XEXP (decomposed, 0);
  inner_op1 = XEXP (decomposed, 1);

  /* Special case (and (xor B C) (not A)), which is equivalent to
     (xor (ior A B) (ior A C))  */
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
    {
      distributed = XEXP (distributed, 0);
      outer_code = IOR;
    }

  if (n == 0)
    {
      /* Distribute the second term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
    }
  else
    {
      /* Distribute the first term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
    }

  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
                                                     new_op0, new_op1));
  if (GET_CODE (tmp) != outer_code
      && rtx_cost (tmp, SET) < rtx_cost (x, SET))
    return tmp;

  return NULL_RTX;
}

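/* To illustrate the mechanics (a sketch, not from real compiler output):
   for X == (and (ior A B) C) and N == 0, the decomposed term is (ior A B)
   and the distributed term is C, giving

       (ior (and A C) (and B C))

   after distribution.  This is returned only when it no longer carries
   the outer code and, after simplification of its arms, costs less than
   X; otherwise NULL_RTX is returned.  */
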
/* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.  Return an equivalent form, if different from (and VAROP
   (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */

static rtx
simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
                          unsigned HOST_WIDE_INT constop)
{
  unsigned HOST_WIDE_INT nonzero;
  unsigned HOST_WIDE_INT orig_constop;
  rtx orig_varop;
  int i;

  orig_varop = varop;
  orig_constop = constop;
  if (GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* Simplify VAROP knowing that we will only be looking at some of the
     bits in it.

     Note that by passing in CONSTOP, we guarantee that the bits not set in
     CONSTOP are not significant and will never be examined.  We must
     ensure that is the case by explicitly masking out those bits
     before returning.  */
  varop = force_to_mode (varop, mode, constop, 0);

  /* If VAROP is a CLOBBER, we will fail so return it.  */
  if (GET_CODE (varop) == CLOBBER)
    return varop;

  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
     to VAROP and return the new constant.  */
  if (GET_CODE (varop) == CONST_INT)
    return gen_int_mode (INTVAL (varop) & constop, mode);

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart
        (mode,
         apply_distributive_law
         (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
                               simplify_and_const_int (NULL_RTX,
                                                       GET_MODE (varop),
                                                       XEXP (varop, 0),
                                                       constop),
                               simplify_and_const_int (NULL_RTX,
                                                       GET_MODE (varop),
                                                       XEXP (varop, 1),
                                                       constop))));

  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
     the AND and see if one of the operands simplifies to zero.  If so, we
     may eliminate it.  */

  if (GET_CODE (varop) == PLUS
      && exact_log2 (constop + 1) >= 0)
    {
      rtx o0, o1;

      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
      if (o0 == const0_rtx)
        return o1;
      if (o1 == const0_rtx)
        return o0;
    }

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    return varop;

  if (varop == orig_varop && constop == orig_constop)
    return NULL_RTX;

  /* Otherwise, return an AND.  */
  return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
}

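/* A worked example of the NEG rule above (a sketch, not from real
   compiler output): if X is known to be 0 or 1, then

       (and:SI (neg:SI (reg X)) (const_int 8))

   has VAROP == (neg (reg X)) and CONSTOP == 8, a power of two with
   exact_log2 == 3, so the whole expression becomes

       (ashift:SI (reg X) (const_int 3))

   since negating 0 or 1 yields 0 or all-ones, and masking all-ones with
   a single bit is the same as shifting the low bit into that position.  */
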
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
                        unsigned HOST_WIDE_INT constop)
{
  rtx tem = simplify_and_const_int_1 (mode, varop, constop);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (AND, GET_MODE (varop), varop,
                             gen_int_mode (constop, mode));
  if (GET_MODE (x) != mode)
    x = gen_lowpart (mode, x);
  return x;
}

/* Given a REG, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
   a shift, AND, or zero_extract, we can do better.  */

static rtx
reg_nonzero_bits_for_combine (rtx x, enum machine_mode mode,
                              rtx known_x ATTRIBUTE_UNUSED,
                              enum machine_mode known_mode ATTRIBUTE_UNUSED,
                              unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
                              unsigned HOST_WIDE_INT *nonzero)
{
  rtx tem;

  /* If X is a register whose nonzero bits value is current, use it.
     Otherwise, if X is a register whose value we can find, use that
     value.  Otherwise, use the previously-computed global nonzero bits
     for this register.  */

  if (reg_stat[REGNO (x)].last_set_value != 0
      && (reg_stat[REGNO (x)].last_set_mode == mode
          || (GET_MODE_CLASS (reg_stat[REGNO (x)].last_set_mode) == MODE_INT
              && GET_MODE_CLASS (mode) == MODE_INT))
      && (reg_stat[REGNO (x)].last_set_label == label_tick
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && ! REGNO_REG_SET_P
                 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
                  REGNO (x))))
      && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
    {
      *nonzero &= reg_stat[REGNO (x)].last_set_nonzero_bits;
      return NULL;
    }

  tem = get_last_value (x);

  if (tem)
    {
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is narrower than MODE and TEM is a non-negative
         constant that would appear negative in the mode of X,
         sign-extend it for use in reg_nonzero_bits because some
         machines (maybe most) will actually do the sign-extension
         and this is the conservative approach.

         ??? For 2.5, try to tighten up the MD files in this regard
         instead of this kludge.  */

      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
          && GET_CODE (tem) == CONST_INT
          && INTVAL (tem) > 0
          && 0 != (INTVAL (tem)
                   & ((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        tem = GEN_INT (INTVAL (tem)
                       | ((HOST_WIDE_INT) (-1)
                          << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
      return tem;
    }
  else if (nonzero_sign_valid && reg_stat[REGNO (x)].nonzero_bits)
    {
      unsigned HOST_WIDE_INT mask = reg_stat[REGNO (x)].nonzero_bits;

      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
        /* We don't know anything about the upper bits.  */
        mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
      *nonzero &= mask;
    }

  return NULL;
}

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static rtx
reg_num_sign_bit_copies_for_combine (rtx x, enum machine_mode mode,
                                     rtx known_x ATTRIBUTE_UNUSED,
                                     enum machine_mode known_mode
                                     ATTRIBUTE_UNUSED,
                                     unsigned int known_ret ATTRIBUTE_UNUSED,
                                     unsigned int *result)
{
  rtx tem;

  if (reg_stat[REGNO (x)].last_set_value != 0
      && reg_stat[REGNO (x)].last_set_mode == mode
      && (reg_stat[REGNO (x)].last_set_label == label_tick
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && ! REGNO_REG_SET_P
                 (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
                  REGNO (x))))
      && INSN_CUID (reg_stat[REGNO (x)].last_set) < subst_low_cuid)
    {
      *result = reg_stat[REGNO (x)].last_set_sign_bit_copies;
      return NULL;
    }

  tem = get_last_value (x);
  if (tem != 0)
    return tem;

  if (nonzero_sign_valid && reg_stat[REGNO (x)].sign_bit_copies != 0
      && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
    *result = reg_stat[REGNO (x)].sign_bit_copies;

  return NULL;
}

/* Return the number of "extended" bits there are in X, when interpreted
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
   unsigned quantities, this is the number of high-order zero bits.
   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
   bits.  For example, if two quantities for which this function returns
   at least 1 are added, the addition is known not to overflow.

   This function will always return 0 unless called during combine, which
   implies that it must be called from a define_split.  */

unsigned int
extended_count (rtx x, enum machine_mode mode, int unsignedp)
{
  if (nonzero_sign_valid == 0)
    return 0;

  return (unsignedp
          ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
             ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
                               - floor_log2 (nonzero_bits (x, mode)))
             : 0)
          : num_sign_bit_copies (x, mode) - 1);
}

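/* A worked example (a sketch, not from real compiler output): if X is an
   SImode value whose nonzero_bits are 0xff, then for the unsigned case

       extended_count (x, SImode, 1) == 32 - 1 - floor_log2 (0xff)
                                     == 31 - 7 == 24

   i.e. 24 spare high-order zero bits, so adding two such values cannot
   overflow 32 bits.  */
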
/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only NEG,
   PLUS, IOR, XOR, and AND are supported.  We may set *POP0 to SET if the
   proper result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */

static int
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
                 enum rtx_code op1, HOST_WIDE_INT const1,
                 enum machine_mode mode, int *pcomp_p)
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == UNKNOWN || op0 == SET)
    return 1;

  else if (op0 == UNKNOWN)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      switch (op0)
        {
        case AND:
          const0 &= const1;
          break;
        case IOR:
          const0 |= const1;
          break;
        case XOR:
          const0 ^= const1;
          break;
        case PLUS:
          const0 += const1;
          break;
        case NEG:
          op0 = UNKNOWN;
          break;
        default:
          break;
        }
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    switch (op0)
      {
      case IOR:
        if (op1 == AND)
          /* (a & b) | b == b */
          op0 = SET;
        else /* op1 == XOR */
          /* (a ^ b) | b == a | b */
          {;}
        break;

      case XOR:
        if (op1 == AND)
          /* (a & b) ^ b == (~a) & b */
          op0 = AND, *pcomp_p = 1;
        else /* op1 == IOR */
          /* (a | b) ^ b == a & ~b */
          op0 = AND, const0 = ~const0;
        break;

      case AND:
        if (op1 == IOR)
          /* (a | b) & b == b */
          op0 = SET;
        else /* op1 == XOR */
          /* (a ^ b) & b == (~a) & b */
          *pcomp_p = 1;
        break;
      default:
        break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = UNKNOWN;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
           && op0 == AND)
    op0 = UNKNOWN;

  /* ??? Slightly redundant with the above mask, but not entirely.
     Moving this above means we'd have to sign-extend the mode mask
     for the final test.  */
  const0 = trunc_int_for_mode (const0, mode);

  *pop0 = op0;
  *pconst0 = const0;

  return 1;
}

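/* A worked example (a sketch, not from real compiler output): suppose we
   have already decided to apply *POP0 == IOR with *PCONST0 == 0xf0 last,
   and now want to also apply OP1 == AND with CONST1 == 0xf0 first.  The
   codes differ but the constants match, so the IOR/AND entry in the table
   above applies: (a & b) | b == b, hence *POP0 becomes SET and the proper
   result is simply the constant 0xf0.  */
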
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  Return NULL_RTX if we cannot
   simplify it.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
                        rtx varop, int orig_count)
{
  enum rtx_code orig_code = code;
  rtx orig_varop = varop;
  int count;
  enum machine_mode mode = result_mode;
  enum machine_mode shift_mode, tmode;
  unsigned int mode_words
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
  /* We form (outer_op (code varop count) (outer_const)).  */
  enum rtx_code outer_op = UNKNOWN;
  HOST_WIDE_INT outer_const = 0;
  int complement_p = 0;
  rtx new, x;

  /* Make sure to truncate the "natural" shift on the way in.  We don't
     want to do this inside the loop as it makes it more difficult to
     combine shifts.  */
  if (SHIFT_COUNT_TRUNCATED)
    orig_count &= GET_MODE_BITSIZE (mode) - 1;

  /* If we were given an invalid count, don't do anything except exactly
     what was requested.  */

  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
    return NULL_RTX;

  count = orig_count;

  /* Unless one of the branches of the `if' in this loop does a `continue',
     we will `break' the loop after the `if'.  */

  while (count != 0)
    {
      /* If we have an operand of (clobber (const_int 0)), fail.  */
      if (GET_CODE (varop) == CLOBBER)
        return NULL_RTX;

      /* If we discovered we had to complement VAROP, leave.  Making a NOT
         here would cause an infinite loop.  */
      if (complement_p)
        break;

      /* Convert ROTATERT to ROTATE.  */
      if (code == ROTATERT)
        {
          unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
          code = ROTATE;
          if (VECTOR_MODE_P (result_mode))
            count = bitsize / GET_MODE_NUNITS (result_mode) - count;
          else
            count = bitsize - count;
        }

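      /* For example (a sketch, not from real compiler output), in SImode

             (rotatert:SI (reg X) (const_int 8))

         is canonicalized here to

             (rotate:SI (reg X) (const_int 24))

         since rotating right by 8 is the same as rotating left by
         32 - 8.  */
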
      /* We need to determine what mode we will do the shift in.  If the
         shift is a right shift or a ROTATE, we must always do it in the mode
         it was originally done in.  Otherwise, we can do it in MODE, the
         widest mode encountered.  */
      shift_mode
        = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
           ? result_mode : mode);

      /* Handle cases where the count is greater than the size of the mode
         minus 1.  For ASHIFT, use the size minus one as the count (this can
         occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
         take the count modulo the size.  For other shifts, the result is
         zero.

         Since these shifts are being produced by the compiler by combining
         multiple operations, each of which is defined, we know what the
         result is supposed to be.  */

      if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
        {
          if (code == ASHIFTRT)
            count = GET_MODE_BITSIZE (shift_mode) - 1;
          else if (code == ROTATE || code == ROTATERT)
            count %= GET_MODE_BITSIZE (shift_mode);
          else
            {
              /* We can't simply return zero because there may be an
                 outer op.  */
              varop = const0_rtx;
              count = 0;
              break;
            }
        }

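      /* For example (a sketch, not from real compiler output), merging
         nested QImode shifts can leave COUNT == 12: an ASHIFTRT then uses
         7, which still broadcasts the sign bit; a ROTATE uses
         12 % 8 == 4; and an LSHIFTRT by 12 is simply zero.  */
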
      /* An arithmetic right shift of a quantity known to be -1 or 0
         is a no-op.  */
      if (code == ASHIFTRT
          && (num_sign_bit_copies (varop, shift_mode)
              == GET_MODE_BITSIZE (shift_mode)))
        {
          count = 0;
          break;
        }

      /* If we are doing an arithmetic right shift and discarding all but
         the sign bit copies, this is equivalent to doing a shift by the
         bitsize minus one.  Convert it into that shift because it will often
         allow other simplifications.  */

      if (code == ASHIFTRT
          && (count + num_sign_bit_copies (varop, shift_mode)
              >= GET_MODE_BITSIZE (shift_mode)))
        count = GET_MODE_BITSIZE (shift_mode) - 1;

      /* We simplify the tests below and elsewhere by converting
         ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
         `make_compound_operation' will convert it to an ASHIFTRT for
         those machines (such as VAX) that don't have an LSHIFTRT.  */
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
          && code == ASHIFTRT
          && ((nonzero_bits (varop, shift_mode)
               & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
              == 0))
        code = LSHIFTRT;

      if (((code == LSHIFTRT
            && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
            && !(nonzero_bits (varop, shift_mode) >> count))
           || (code == ASHIFT
               && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
               && !((nonzero_bits (varop, shift_mode) << count)
                    & GET_MODE_MASK (shift_mode))))
          && !side_effects_p (varop))
        varop = const0_rtx;

      switch (GET_CODE (varop))
        {
        case SIGN_EXTEND:
        case ZERO_EXTEND:
        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          new = expand_compound_operation (varop);
          if (new != varop)
            {
              varop = new;
              continue;
            }
          break;

        case MEM:
          /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
             minus the width of a smaller mode, we can do this with a
             SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
          if ((code == ASHIFTRT || code == LSHIFTRT)
              && ! mode_dependent_address_p (XEXP (varop, 0))
              && ! MEM_VOLATILE_P (varop)
              && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
                                         MODE_INT, 1)) != BLKmode)
            {
              new = adjust_address_nv (varop, tmode,
                                       BYTES_BIG_ENDIAN ? 0
                                       : count / BITS_PER_UNIT);

              varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
                                     : ZERO_EXTEND, mode, new);
              count = 0;
              continue;
            }
          break;

        case SUBREG:
          /* If VAROP is a SUBREG, strip it as long as the inner operand has
             the same number of words as what we've seen so far.  Then store
             the widest mode in MODE.  */
          if (subreg_lowpart_p (varop)
              && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
                  > GET_MODE_SIZE (GET_MODE (varop)))
              && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
                                  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                 == mode_words)
            {
              varop = SUBREG_REG (varop);
              if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
                mode = GET_MODE (varop);
              continue;
            }
          break;

        case MULT:
          /* Some machines use MULT instead of ASHIFT because MULT
             is cheaper.  But it is still better on those machines to
             merge two shifts into one.  */
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
            {
              varop
                = simplify_gen_binary (ASHIFT, GET_MODE (varop),
                                       XEXP (varop, 0),
                                       GEN_INT (exact_log2 (
                                                INTVAL (XEXP (varop, 1)))));
              continue;
            }
          break;

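        /* For example (a sketch, not from real compiler output), the MULT
           rewrite above turns (mult:SI (reg X) (const_int 8)) into
           (ashift:SI (reg X) (const_int 3)), so that it can then be merged
           with the surrounding shift by the nested-shift rules below.  */
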
        case UDIV:
          /* Similar, for when divides are cheaper.  */
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
            {
              varop
                = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
                                       XEXP (varop, 0),
                                       GEN_INT (exact_log2 (
                                                INTVAL (XEXP (varop, 1)))));
              continue;
            }
          break;

        case ASHIFTRT:
          /* If we are extracting just the sign bit of an arithmetic
             right shift, that shift is not needed.  However, the sign
             bit of a wider mode may be different from what would be
             interpreted as the sign bit in a narrower mode, so, if
             the result is narrower, don't discard the shift.  */
          if (code == LSHIFTRT
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
              && (GET_MODE_BITSIZE (result_mode)
                  >= GET_MODE_BITSIZE (GET_MODE (varop))))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* ... fall through ...  */

        case LSHIFTRT:
        case ASHIFT:
        case ROTATE:
          /* Here we have two nested shifts.  The result is usually the
             AND of a new shift with a mask.  We compute the result below.  */
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
              && INTVAL (XEXP (varop, 1)) >= 0
              && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && !VECTOR_MODE_P (result_mode))
            {
              enum rtx_code first_code = GET_CODE (varop);
              unsigned int first_count = INTVAL (XEXP (varop, 1));
              unsigned HOST_WIDE_INT mask;
              rtx mask_rtx;

              /* We have one common special case.  We can't do any merging if
                 the inner code is an ASHIFTRT of a smaller mode.  However, if
                 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
                 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
                 we can convert it to
                 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
                 This simplifies certain SIGN_EXTEND operations.  */
              if (code == ASHIFT && first_code == ASHIFTRT
                  && count == (GET_MODE_BITSIZE (result_mode)
                               - GET_MODE_BITSIZE (GET_MODE (varop))))
                {
                  /* C3 has the low-order C1 bits zero.  */

                  mask = (GET_MODE_MASK (mode)
                          & ~(((HOST_WIDE_INT) 1 << first_count) - 1));

                  varop = simplify_and_const_int (NULL_RTX, result_mode,
                                                  XEXP (varop, 0), mask);
                  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
                                                varop, count);
                  count = first_count;
                  code = ASHIFTRT;
                  continue;
                }

              /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
                 than C1 high-order bits equal to the sign bit, we can convert
                 this to either an ASHIFT or an ASHIFTRT depending on the
                 two counts.

                 We cannot do this if VAROP's mode is not SHIFT_MODE.  */

              if (code == ASHIFTRT && first_code == ASHIFT
                  && GET_MODE (varop) == shift_mode
                  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
                      > first_count))
                {
                  varop = XEXP (varop, 0);
                  count -= first_count;
                  if (count < 0)
                    {
                      count = -count;
                      code = ASHIFT;
                    }

                  continue;
                }

              /* There are some cases we can't do.  If CODE is ASHIFTRT,
                 we can only do this if FIRST_CODE is also ASHIFTRT.

                 We can't do the case when CODE is ROTATE and FIRST_CODE is
                 ASHIFTRT.

                 If the mode of this shift is not the mode of the outer shift,
                 we can't do this if either shift is a right shift or ROTATE.

                 Finally, we can't do any of these if the mode is too wide
                 unless the codes are the same.

                 Handle the case where the shift codes are the same
                 first.  */

              if (code == first_code)
                {
                  if (GET_MODE (varop) != result_mode
                      && (code == ASHIFTRT || code == LSHIFTRT
                          || code == ROTATE))
                    break;

                  count += first_count;
                  varop = XEXP (varop, 0);
                  continue;
                }

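              /* For example (a sketch, not from real compiler output),
                 when the codes match as above, the counts simply add:
                 (ashift:SI (ashift:SI X 2) 3) becomes (ashift:SI X 5).  */
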
              if (code == ASHIFTRT
8857
                  || (code == ROTATE && first_code == ASHIFTRT)
8858
                  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8859
                  || (GET_MODE (varop) != result_mode
8860
                      && (first_code == ASHIFTRT || first_code == LSHIFTRT
8861
                          || first_code == ROTATE
8862
                          || code == ROTATE)))
8863
                break;
8864
 
8865
              /* To compute the mask to apply after the shift, shift the
8866
                 nonzero bits of the inner shift the same way the
8867
                 outer shift will.  */
8868
 
8869
              mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8870
 
8871
              mask_rtx
8872
                = simplify_const_binary_operation (code, result_mode, mask_rtx,
8873
                                                   GEN_INT (count));
8874
 
8875
              /* Give up if we can't compute an outer operation to use.  */
8876
              if (mask_rtx == 0
8877
                  || GET_CODE (mask_rtx) != CONST_INT
8878
                  || ! merge_outer_ops (&outer_op, &outer_const, AND,
8879
                                        INTVAL (mask_rtx),
8880
                                        result_mode, &complement_p))
8881
                break;
8882
 
8883
              /* If the shifts are in the same direction, we add the
8884
                 counts.  Otherwise, we subtract them.  */
8885
              if ((code == ASHIFTRT || code == LSHIFTRT)
8886
                  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8887
                count += first_count;
8888
              else
8889
                count -= first_count;
8890
 
8891
              /* If COUNT is positive, the new shift is usually CODE,
8892
                 except for the two exceptions below, in which case it is
8893
                 FIRST_CODE.  If the count is negative, FIRST_CODE should
8894
                 always be used  */
8895
              if (count > 0
8896
                  && ((first_code == ROTATE && code == ASHIFT)
8897
                      || (first_code == ASHIFTRT && code == LSHIFTRT)))
8898
                code = first_code;
8899
              else if (count < 0)
8900
                code = first_code, count = -count;
8901
 
8902
              varop = XEXP (varop, 0);
8903
              continue;
8904
            }
8905
 
8906
          /* If we have (A << B << C) for any shift, we can convert this to
8907
             (A << C << B).  This wins if A is a constant.  Only try this if
8908
             B is not a constant.  */
8909
 
8910
          else if (GET_CODE (varop) == code
8911
                   && GET_CODE (XEXP (varop, 0)) == CONST_INT
8912
                   && GET_CODE (XEXP (varop, 1)) != CONST_INT)
8913
            {
8914
              rtx new = simplify_const_binary_operation (code, mode,
8915
                                                         XEXP (varop, 0),
8916
                                                         GEN_INT (count));
8917
              varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1));
8918
              count = 0;
8919
              continue;
8920
            }
8921
          break;
8922
 
8923
        case NOT:
8924
          /* Make this fit the case below.  */
8925
          varop = gen_rtx_XOR (mode, XEXP (varop, 0),
8926
                               GEN_INT (GET_MODE_MASK (mode)));
8927
          continue;
8928
 
8929
        case IOR:
8930
        case AND:
8931
        case XOR:
8932
          /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8933
             with C the size of VAROP - 1 and the shift is logical if
8934
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8935
             we have an (le X 0) operation.   If we have an arithmetic shift
8936
             and STORE_FLAG_VALUE is 1 or we have a logical shift with
8937
             STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
8938
 
8939
          if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8940
              && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8941
              && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8942
              && (code == LSHIFTRT || code == ASHIFTRT)
8943
              && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
8944
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8945
            {
8946
              count = 0;
8947
              varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
8948
                                  const0_rtx);
8949
 
8950
              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8951
                varop = gen_rtx_NEG (GET_MODE (varop), varop);
8952
 
8953
              continue;
8954
            }
8955
 
8956
          /* If we have (shift (logical)), move the logical to the outside
8957
             to allow it to possibly combine with another logical and the
8958
             shift to combine with another shift.  This also canonicalizes to
8959
             what a ZERO_EXTRACT looks like.  Also, some machines have
8960
             (and (shift)) insns.  */
8961
 
8962
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8963
              /* We can't do this if we have (ashiftrt (xor))  and the
8964
                 constant has its sign bit set in shift_mode.  */
8965
              && !(code == ASHIFTRT && GET_CODE (varop) == XOR
8966
                   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
8967
                                              shift_mode))
8968
              && (new = simplify_const_binary_operation (code, result_mode,
8969
                                                         XEXP (varop, 1),
8970
                                                         GEN_INT (count))) != 0
8971
              && GET_CODE (new) == CONST_INT
8972
              && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8973
                                  INTVAL (new), result_mode, &complement_p))
8974
            {
8975
              varop = XEXP (varop, 0);
8976
              continue;
8977
            }
8978
 
8979
          /* If we can't do that, try to simplify the shift in each arm of the
8980
             logical expression, make a new logical expression, and apply
8981
             the inverse distributive law.  This also can't be done
8982
             for some (ashiftrt (xor)).  */
8983
          if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8984
             && !(code == ASHIFTRT && GET_CODE (varop) == XOR
8985
                  && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
8986
                                             shift_mode)))
8987
            {
8988
              rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8989
                                              XEXP (varop, 0), count);
8990
              rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8991
                                              XEXP (varop, 1), count);
8992
 
8993
              varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
8994
                                           lhs, rhs);
8995
              varop = apply_distributive_law (varop);
8996
 
8997
              count = 0;
8998
              continue;
8999
            }
9000
          break;
9001
 
9002
        case EQ:
9003
          /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9004
             says that the sign bit can be tested, FOO has mode MODE, C is
9005
             GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9006
             that may be nonzero.  */
9007
          if (code == LSHIFTRT
9008
              && XEXP (varop, 1) == const0_rtx
9009
              && GET_MODE (XEXP (varop, 0)) == result_mode
9010
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9011
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9012
              && STORE_FLAG_VALUE == -1
9013
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9014
              && merge_outer_ops (&outer_op, &outer_const, XOR,
9015
                                  (HOST_WIDE_INT) 1, result_mode,
9016
                                  &complement_p))
9017
            {
9018
              varop = XEXP (varop, 0);
9019
              count = 0;
9020
              continue;
9021
            }
9022
          break;
9023
 
9024
        case NEG:
9025
          /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9026
             than the number of bits in the mode is equivalent to A.  */
9027
          if (code == LSHIFTRT
9028
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9029
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9030
            {
9031
              varop = XEXP (varop, 0);
9032
              count = 0;
9033
              continue;
9034
            }
9035
 
9036
          /* NEG commutes with ASHIFT since it is multiplication.  Move the
9037
             NEG outside to allow shifts to combine.  */
9038
          if (code == ASHIFT
9039
              && merge_outer_ops (&outer_op, &outer_const, NEG,
9040
                                  (HOST_WIDE_INT) 0, result_mode,
9041
                                  &complement_p))
9042
            {
9043
              varop = XEXP (varop, 0);
9044
              continue;
9045
            }
9046
          break;
9047
 
9048
        case PLUS:
9049
          /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9050
             is one less than the number of bits in the mode is
9051
             equivalent to (xor A 1).  */
9052
          if (code == LSHIFTRT
9053
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9054
              && XEXP (varop, 1) == constm1_rtx
9055
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9056
              && merge_outer_ops (&outer_op, &outer_const, XOR,
9057
                                  (HOST_WIDE_INT) 1, result_mode,
9058
                                  &complement_p))
9059
            {
9060
              count = 0;
9061
              varop = XEXP (varop, 0);
9062
              continue;
9063
            }
9064
 
9065
          /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9066
             that might be nonzero in BAR are those being shifted out and those
9067
             bits are known zero in FOO, we can replace the PLUS with FOO.
9068
             Similarly in the other operand order.  This code occurs when
9069
             we are computing the size of a variable-size array.  */
9070
 
9071
          if ((code == ASHIFTRT || code == LSHIFTRT)
9072
              && count < HOST_BITS_PER_WIDE_INT
9073
              && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9074
              && (nonzero_bits (XEXP (varop, 1), result_mode)
9075
                  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9076
            {
9077
              varop = XEXP (varop, 0);
9078
              continue;
9079
            }
9080
          else if ((code == ASHIFTRT || code == LSHIFTRT)
9081
                   && count < HOST_BITS_PER_WIDE_INT
9082
                   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9083
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9084
                            >> count)
9085
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9086
                            & nonzero_bits (XEXP (varop, 1),
9087
                                                 result_mode)))
9088
            {
9089
              varop = XEXP (varop, 1);
9090
              continue;
9091
            }
9092
 
9093
          /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9094
          if (code == ASHIFT
9095
              && GET_CODE (XEXP (varop, 1)) == CONST_INT
9096
              && (new = simplify_const_binary_operation (ASHIFT, result_mode,
9097
                                                         XEXP (varop, 1),
9098
                                                         GEN_INT (count))) != 0
9099
              && GET_CODE (new) == CONST_INT
9100
              && merge_outer_ops (&outer_op, &outer_const, PLUS,
9101
                                  INTVAL (new), result_mode, &complement_p))
9102
            {
9103
              varop = XEXP (varop, 0);
9104
              continue;
9105
            }
9106
 
9107
          /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9108
             signbit', and attempt to change the PLUS to an XOR and move it to
9109
             the outer operation as is done above in the AND/IOR/XOR case
9110
             leg for shift(logical). See details in logical handling above
9111
             for reasoning in doing so.  */
9112
          if (code == LSHIFTRT
9113
              && GET_CODE (XEXP (varop, 1)) == CONST_INT
9114
              && mode_signbit_p (result_mode, XEXP (varop, 1))
9115
              && (new = simplify_const_binary_operation (code, result_mode,
9116
                                                         XEXP (varop, 1),
9117
                                                         GEN_INT (count))) != 0
9118
              && GET_CODE (new) == CONST_INT
9119
              && merge_outer_ops (&outer_op, &outer_const, XOR,
9120
                                  INTVAL (new), result_mode, &complement_p))
9121
            {
9122
              varop = XEXP (varop, 0);
9123
              continue;
9124
            }
9125
 
9126
          break;
9127
 
9128
        case MINUS:
          /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have a (gt X 0) operation.  If the shift is arithmetic with
             STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
             we have a (neg (gt X 0)) operation.  */

          if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
              && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
              && (code == LSHIFTRT || code == ASHIFTRT)
              && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
              && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
              varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
                                  const0_rtx);

              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (GET_MODE (varop), varop);

              continue;
            }
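
          /* Illustrative example (hypothetical values, added for
             clarity): in SImode with STORE_FLAG_VALUE == 1,
             (lshiftrt (minus (ashiftrt X 31) X) 31) is 1 exactly when
             X > 0, so it becomes (gt X 0); the arithmetic-shift form
             yields (neg (gt X 0)).  */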
          break;

        case TRUNCATE:
          /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
             if the truncate does not affect the value.  */
          if (code == LSHIFTRT
              && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
              && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
              && (INTVAL (XEXP (XEXP (varop, 0), 1))
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
                      - GET_MODE_BITSIZE (GET_MODE (varop)))))
            {
              rtx varop_inner = XEXP (varop, 0);

              varop_inner
                = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
                                    XEXP (varop_inner, 0),
                                    GEN_INT
                                    (count + INTVAL (XEXP (varop_inner, 1))));
              varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
              count = 0;
              continue;
            }
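
          /* Illustrative example (hypothetical modes, added for
             clarity): (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 3)
             becomes (truncate:SI (lshiftrt:DI X 35)), since the inner
             shift already discarded every DImode bit the truncation
             would drop.  */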
          break;

        default:
          break;
        }

      break;
    }

  /* We need to determine what mode to do the shift in.  If the shift is
     a right shift or ROTATE, we must always do it in the mode it was
     originally done in.  Otherwise, we can do it in MODE, the widest mode
     encountered.  The code we care about is that of the shift that will
     actually be done, not the shift that was originally requested.  */
  shift_mode
    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
       ? result_mode : mode);

  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.  */

  if (outer_op == UNKNOWN
      && orig_code == code && orig_count == count
      && varop == orig_varop
      && shift_mode == GET_MODE (varop))
    return NULL_RTX;

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (shift_mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != UNKNOWN)
    x = simplify_shift_const_1 (code, shift_mode, varop, count);
  else
    x = NULL_RTX;

  if (x == NULL_RTX)
    x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));

  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
                                GET_MODE_MASK (result_mode) >> orig_count);
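
  /* Illustrative example (hypothetical modes, added for clarity): a
     QImode LSHIFTRT by 2 carried out in SImode is masked with
     GET_MODE_MASK (QImode) >> 2, i.e. 0xff >> 2 == 0x3f, clearing bits
     shifted in from outside QImode.  */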

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart_or_truncate (result_mode, x);

  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);

  if (outer_op != UNKNOWN)
    {
      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
        outer_const = trunc_int_for_mode (outer_const, result_mode);

      if (outer_op == AND)
        x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
        {
          /* This means that we have determined that the result is
             equivalent to a constant.  This should be rare.  */
          if (!side_effects_p (x))
            x = GEN_INT (outer_const);
        }
      else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
        x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
      else
        x = simplify_gen_binary (outer_op, result_mode, x,
                                 GEN_INT (outer_const));
    }

  return x;
}

/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  If we cannot simplify it,
   return X or, if it is NULL, synthesize the expression with
   simplify_gen_binary.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are right shifts and ROTATE, which are always done in their original
   mode.  */

static rtx
simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
                      rtx varop, int count)
{
  rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
  if (GET_MODE (x) != result_mode)
    x = gen_lowpart (result_mode, x);
  return x;
}
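
/* A hypothetical use of the helper above (an illustration added for
   clarity, not a call made by the original code): simplifying
   (lshiftrt:SI X 2) would be requested as

     rtx t = simplify_shift_const (NULL_RTX, LSHIFTRT, SImode, x, 2);

   which falls back to synthesizing the shift with simplify_gen_binary
   when no simplification applies.  */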


/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
{
  rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes, old_pat;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
          && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
        return -1;
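
  /* Illustration (a hypothetical pattern, added for clarity): a PARALLEL
     such as
       [(set (reg 60) (plus (reg 61) (reg 62)))
        (clobber (const_int 0))]
     was marked as unusable by an earlier substitution step, so it is
     rejected here without even calling recog.  */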

  old_pat = PATTERN (insn);
  old_notes = REG_NOTES (insn);
  PATTERN (insn) = pat;
  REG_NOTES (insn) = 0;

  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If the pattern isn't recognized, there is the possibility that we
     previously had an insn that clobbered some register as a side effect,
     but the combined insn doesn't need to do that.  So try once more
     without the clobbers unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
          {
            if (i != pos)
              SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
            pos++;
          }

      SUBST_INT (XVECLEN (pat, 0), pos);

      if (pos == 1)
        pat = XVECEXP (pat, 0, 0);

      PATTERN (insn) = pat;
      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }
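
  /* Illustration (a hypothetical pattern, added for clarity): if
       [(set (reg 60) (plus (reg 61) (reg 62)))
        (clobber (reg 63))]
     fails to match, the retry above reduces it to the bare SET, which
     may match an add pattern that needs no clobber.  */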
  PATTERN (insn) = old_pat;
  REG_NOTES (insn) = old_notes;

  /* Recognize all noop sets; these will be killed by a followup pass.  */
  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
                                     rtvec_alloc (GET_CODE (pat) == PARALLEL
                                                  ? (XVECLEN (pat, 0)
                                                     + num_clobbers_to_add)
                                                  : num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
        for (i = 0; i < XVECLEN (pat, 0); i++)
          XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
        XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
           i < XVECLEN (newpat, 0); i++)
        {
          if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
              && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
            return -1;
          notes = gen_rtx_EXPR_LIST (REG_UNUSED,
                                     XEXP (XVECEXP (newpat, 0, i), 0), notes);
        }
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}

/* Like gen_lowpart_general but for use by combine.  In combine it
   is not possible to create any new pseudoregs.  However, it is
   safe to create invalid memory addresses, because combine will
   try to recognize them and all they will do is make the combine
   attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

static rtx
gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
  enum machine_mode imode = GET_MODE (x);
  unsigned int osize = GET_MODE_SIZE (omode);
  unsigned int isize = GET_MODE_SIZE (imode);
  rtx result;

  if (omode == imode)
    return x;

  /* Return identity if this is a CONST or symbolic reference.  */
  if (omode == Pmode
      && (GET_CODE (x) == CONST
          || GET_CODE (x) == SYMBOL_REF
          || GET_CODE (x) == LABEL_REF))
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */
  if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
      && ! ((imode == VOIDmode
             && (GET_CODE (x) == CONST_INT
                 || GET_CODE (x) == CONST_DOUBLE))
            || isize == osize))
    goto fail;

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    {
      x = SUBREG_REG (x);

      /* For use in case we fall down into the address adjustments
         further below, we need to adjust the known mode and size of
         x; imode and isize, since we just adjusted x.  */
      imode = GET_MODE (x);

      if (imode == omode)
        return x;

      isize = GET_MODE_SIZE (imode);
    }

  result = gen_lowpart_common (omode, x);

#ifdef CANNOT_CHANGE_MODE_CLASS
  if (result != 0 && GET_CODE (result) == SUBREG)
    record_subregs_of_mode (result);
#endif

  if (result)
    return result;

  if (MEM_P (x))
    {
      int offset = 0;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
         address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
        goto fail;

      /* If we want to refer to something bigger than the original memref,
         generate a paradoxical subreg instead.  That will force a reload
         of the original memref X.  */
      if (isize < osize)
        return gen_rtx_SUBREG (omode, x, 0);

      if (WORDS_BIG_ENDIAN)
        offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);

      /* Adjust the address so that the address-after-the-data is
         unchanged.  */
      if (BYTES_BIG_ENDIAN)
        offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);

      return adjust_address_nv (x, omode, offset);
    }
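
  /* Worked example (hypothetical 32-bit big-endian target with
     UNITS_PER_WORD == 4, added for clarity): taking the SImode low part
     of a DImode MEM gives offset = MAX (8, 4) - MAX (4, 4) == 4 from the
     WORDS_BIG_ENDIAN adjustment, while the BYTES_BIG_ENDIAN adjustment
     subtracts MIN (4, 4) - MIN (4, 8) == 0; the low word therefore sits
     at byte offset 4, leaving the address-after-the-data unchanged.  */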

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (COMPARISON_P (x))
    return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int offset = 0;
      rtx res;

      offset = subreg_lowpart_offset (omode, imode);
      if (imode == VOIDmode)
        {
          imode = int_mode_for_mode (omode);
          x = gen_lowpart_common (imode, x);
          if (x == NULL)
            goto fail;
        }
      res = simplify_gen_subreg (omode, x, imode, offset);
      if (res)
        return res;
    }

 fail:
  return gen_rtx_CLOBBER (imode, const0_rtx);
}

/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;

  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
         so check specially.  */
      if (code != GTU && code != GEU && code != LTU && code != LEU
          && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
          && GET_CODE (XEXP (op0, 0)) == ASHIFT
          && GET_CODE (XEXP (op1, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
          && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
          && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
              == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && XEXP (op0, 1) == XEXP (op1, 1)
          && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
          && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
          && (INTVAL (XEXP (op0, 1))
              == (GET_MODE_BITSIZE (GET_MODE (op0))
                  - (GET_MODE_BITSIZE
                     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
        {
          op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
          op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
        }
#endif

      /* If both operands are the same constant shift, see if we can ignore the
         shift.  We can if the shift is a rotate or if the bits shifted out of
         this shift are known to be zero for both inputs and if the type of
         comparison is compatible with the shift.  */
      if (GET_CODE (op0) == GET_CODE (op1)
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
              || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
                  && (code != GT && code != LT && code != GE && code != LE))
              || (GET_CODE (op0) == ASHIFTRT
                  && (code != GTU && code != LTU
                      && code != GEU && code != LEU)))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) >= 0
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
          && XEXP (op0, 1) == XEXP (op1, 1))
        {
          enum machine_mode mode = GET_MODE (op0);
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
          int shift_count = INTVAL (XEXP (op0, 1));

          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
            mask &= (mask >> shift_count) << shift_count;
          else if (GET_CODE (op0) == ASHIFT)
            mask = (mask & (mask << shift_count)) >> shift_count;

          if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
              && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
          else
            break;
        }

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
         SUBREGs are of the same mode, and, in both cases, the AND would
         be redundant if the comparison was done in the narrower mode,
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
         and the operand's possibly nonzero bits are 0xffffff01; in that case
         if we only care about QImode, we don't need the AND).  This case
         occurs if the output mode of an scc insn is not SImode and
         STORE_FLAG_VALUE == 1 (e.g., the 386).

         Similarly, check for a case where the AND's are ZERO_EXTEND
         operations from some narrower mode even though a SUBREG is not
         present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
               && GET_CODE (XEXP (op0, 1)) == CONST_INT
               && GET_CODE (XEXP (op1, 1)) == CONST_INT)
        {
          rtx inner_op0 = XEXP (op0, 0);
          rtx inner_op1 = XEXP (op1, 0);
          HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
          int changed = 0;

          if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (inner_op0))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
              && (GET_MODE (SUBREG_REG (inner_op0))
                  == GET_MODE (SUBREG_REG (inner_op1)))
              && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
                                             GET_MODE (SUBREG_REG (inner_op0)))))
              && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
                                             GET_MODE (SUBREG_REG (inner_op1))))))
            {
              op0 = SUBREG_REG (inner_op0);
              op1 = SUBREG_REG (inner_op1);

              /* The resulting comparison is always unsigned since we masked
                 off the original sign bit.  */
              code = unsigned_condition (code);

              changed = 1;
            }

          else if (c0 == c1)
            for (tmode = GET_CLASS_NARROWEST_MODE
                 (GET_MODE_CLASS (GET_MODE (op0)));
                 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
              if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
                {
                  op0 = gen_lowpart (tmode, inner_op0);
                  op1 = gen_lowpart (tmode, inner_op1);
                  code = unsigned_condition (code);
                  changed = 1;
                  break;
                }

          if (! changed)
            break;
        }

      /* If both operands are NOT, we can strip off the outer operation
         and adjust the comparison code for swapped operands; similarly for
         NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
               || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
                   && (code == EQ || code == NE)))
        op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
        break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we only are concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */

  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      unsigned int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
         CCmode, and the floating-point modes.  An exception is that we
         can handle VOIDmode if OP0 is a COMPARE or a comparison
         operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
          && ! (mode == VOIDmode
                && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
        break;

      /* Get the constant we are comparing against and turn off all bits
         not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode != VOIDmode)
        const_op = trunc_int_for_mode (const_op, mode);
      op1 = GEN_INT (const_op);

      /* If we are comparing against a constant power of two and the value
         being compared can only have that single bit nonzero (e.g., it was
         `and'ed with that bit), we can replace this with a comparison
         with zero.  */
      if (const_op
          && (code == EQ || code == NE || code == GE || code == GEU
              || code == LT || code == LTU)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && exact_log2 (const_op) >= 0
          && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
        {
          code = (code == EQ || code == GE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }
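
      /* Illustrative example (hypothetical values, added for clarity):
         if nonzero_bits (op0, mode) == 8, then (eq op0 8) becomes
         (ne op0 0) and (ne op0 8) becomes (eq op0 0).  */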

      /* Similarly, if we are comparing a value known to be either -1 or
         0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
          && (code == EQ || code == NE || code == GT || code == LE
              || code == GEU || code == LTU)
          && num_sign_bit_copies (op0, mode) == mode_width)
        {
          code = (code == EQ || code == LE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }
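
      /* Illustrative example (hypothetical values, added for clarity):
         if OP0 is known to be 0 or -1, then (eq op0 -1) becomes
         (ne op0 0) and (ne op0 -1) becomes (eq op0 0).  */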

      /* Do some canonicalizations based on the comparison code.  We prefer
         comparisons against zero and then prefer equality comparisons.
         If we can reduce the size of a constant, we will do that too.  */

      switch (code)
        {
        case LT:
          /* < C is equivalent to <= (C - 1) */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LE;
              /* ... fall through to LE case below.  */
            }
          else
            break;

        case LE:
          /* <= C is equivalent to < (C + 1); we do this for C < 0  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = LT;
            }

          /* If we are doing a <= 0 comparison on a value known to have
             a zero sign bit, we can replace this with == 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = EQ;
          break;

        case GE:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GT;
              /* ... fall through to GT below.  */
            }
          else
            break;

        case GT:
          /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = GE;
            }

          /* If we are doing a > 0 comparison on a value known to have
             a zero sign bit, we can replace this with != 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = NE;
          break;

        case LTU:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LEU;
              /* ... fall through ...  */
            }

          /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
              break;
            }
          else
            break;

        case LEU:
          /* unsigned <= 0 is equivalent to == 0 */
          if (const_op == 0)
            code = EQ;

          /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
            }
          break;
 
        case GEU:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 1)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GTU;
              /* ... fall through ...  */
            }

          /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
              break;
            }
          else
            break;

        case GTU:
          /* unsigned > 0 is equivalent to != 0 */
          if (const_op == 0)
            code = NE;

          /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
            }
          break;

        default:
          break;
        }
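
      /* Illustrative examples of the canonicalizations above
         (hypothetical SImode values, added for clarity): (lt X 5)
         becomes (le X 4), (ltu X 0x80000000) becomes (ge X 0), and
         (gtu X 0) becomes (ne X 0).  */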

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
                               || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
         MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        op0 = force_to_mode (op0, mode,
                             ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (mode) - 1)),
                             0);

      /* Now try cases based on the opcode of OP0.  If none of the cases
         does a "continue", we exit this loop immediately after the
         switch.  */

      switch (GET_CODE (op0))
        {
        case ZERO_EXTRACT:
          /* If we are extracting a single bit from a variable position in
             a constant that has only a single bit set and are comparing it
             with zero, we can convert this into an equality comparison
             between the position and the location of the single bit.  */
          /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
             have already reduced the shift count modulo the word size.  */
          if (!SHIFT_COUNT_TRUNCATED
              && GET_CODE (XEXP (op0, 0)) == CONST_INT
              && XEXP (op0, 1) == const1_rtx
              && equality_comparison_p && const_op == 0
              && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
            {
              if (BITS_BIG_ENDIAN)
                {
                  enum machine_mode new_mode
                    = mode_for_extraction (EP_extzv, 1);
                  if (new_mode == MAX_MACHINE_MODE)
                    i = BITS_PER_WORD - 1 - i;
                  else
                    {
                      mode = new_mode;
                      i = (GET_MODE_BITSIZE (mode) - 1 - i);
                    }
                }

              op0 = XEXP (op0, 2);
              op1 = GEN_INT (i);
              const_op = i;

              /* Result is nonzero iff shift count is equal to I.  */
              code = reverse_condition (code);
              continue;
            }
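
          /* Illustrative example (hypothetical values, little-endian bit
             numbering, added for clarity):
             (eq (zero_extract (const_int 16) 1 POS) 0) tests the only
             set bit of 16, bit 4, so it becomes (ne POS 4).  */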

          /* ... fall through ...  */

        case SIGN_EXTRACT:
          tem = expand_compound_operation (op0);
          if (tem != op0)
            {
              op0 = tem;
              continue;
            }
          break;

        case NOT:
          /* If testing for equality, we can take the NOT of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If just looking at the sign bit, reverse the sense of the
             comparison.  */
          if (sign_bit_comparison_p)
            {
              op0 = XEXP (op0, 0);
              code = (code == GE ? LT : GE);
              continue;
            }
          break;
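
          /* Illustrative examples (hypothetical values, added for
             clarity): (eq (not X) 5) becomes (eq X -6), and
             (lt (not X) 0) becomes (ge X 0).  */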

        case NEG:
          /* If testing for equality, we can take the NEG of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* The remaining cases only apply to comparisons with zero.  */
          if (const_op != 0)
            break;

          /* When X is ABS or is known positive,
             (neg X) is < 0 if and only if X != 0.  */

          if (sign_bit_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ABS
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
                      && (nonzero_bits (XEXP (op0, 0), mode)
                          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
            {
              op0 = XEXP (op0, 0);
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If we have NEG of something whose two high-order bits are the
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
          if (num_sign_bit_copies (op0, mode) >= 2)
            {
              op0 = XEXP (op0, 0);
              code = swap_condition (code);
              continue;
            }
          break;

        case ROTATE:
          /* If we are testing equality and our count is a constant, we
             can perform the inverse operation on our RHS.  */
          if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (tem = simplify_binary_operation (ROTATERT, mode,
                                                   op1, XEXP (op0, 1))) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
             a particular bit.  Convert it to an AND of a constant of that
             bit.  This will be converted into a ZERO_EXTRACT.  */
          if (const_op == 0 && sign_bit_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* Fall through.  */

        case ABS:
          /* ABS is ignorable inside an equality comparison with zero.  */
          if (const_op == 0 && equality_comparison_p)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SIGN_EXTEND:
          /* Can simplify (compare (zero/sign_extend FOO) CONST) to
             (compare FOO CONST) if CONST fits in FOO's mode and we
             are either testing inequality or have an unsigned
             comparison with ZERO_EXTEND or a signed comparison with
             SIGN_EXTEND.  But don't do it if we don't have a compare
             insn of the given mode, since we'd have to revert it
             later on, and then we wouldn't know whether to sign- or
             zero-extend.  */
          mode = GET_MODE (XEXP (op0, 0));
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
              && ! unsigned_comparison_p
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < (((unsigned HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (mode) - 1))))
              && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SUBREG:
          /* Check for the case where we are comparing A - C1 with C2, that is

               (subreg:MODE (plus (A) (-C1))) op (C2)

             with C1 a constant, and try to lift the SUBREG, i.e. to do the
             comparison in the wider mode.  One of the following two conditions
             must be true in order for this to be valid:

               1. The mode extension results in the same bit pattern being added
                  on both sides and the comparison is equality or unsigned.  As
                  C2 has been truncated to fit in MODE, the pattern can only be
                  all 0s or all 1s.

               2. The mode extension results in the sign bit being copied on
                  each side.

             The difficulty here is that we have predicates for A but not for
             (A - C1) so we need to check that C1 is within proper bounds so
             as to perturb A as little as possible.  */

          if (mode_width <= HOST_BITS_PER_WIDE_INT
              && subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
              && GET_CODE (SUBREG_REG (op0)) == PLUS
              && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT)
            {
              enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
              rtx a = XEXP (SUBREG_REG (op0), 0);
              HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));

              if ((c1 > 0
                   && (unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
                   && (equality_comparison_p || unsigned_comparison_p)
                   /* (A - C1) zero-extends if it is positive and sign-extends
                      if it is negative, C2 both zero- and sign-extends.  */
                   && ((0 == (nonzero_bits (a, inner_mode)
                              & ~GET_MODE_MASK (mode))
                        && const_op >= 0)
                       /* (A - C1) sign-extends if it is positive and 1-extends
                          if it is negative, C2 both sign- and 1-extends.  */
                       || (num_sign_bit_copies (a, inner_mode)
                           > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
                                             - mode_width)
                           && const_op < 0)))
                  || ((unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
                      /* (A - C1) always sign-extends, like C2.  */
                      && num_sign_bit_copies (a, inner_mode)
                         > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
                                           - (mode_width - 1))))
                {
                  op0 = SUBREG_REG (op0);
                  continue;
                }
            }

          /* If the inner mode is narrower and we are extracting the low part,
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
          if (subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
            /* Fall through */ ;
          else
            break;

          /* ... fall through ...  */

        case ZERO_EXTEND:
          mode = GET_MODE (XEXP (op0, 0));
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
              && (unsigned_comparison_p || equality_comparison_p)
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
              && cmp_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
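
          /* Illustrative example (hypothetical modes, added for
             clarity): (eq (zero_extend:SI (reg:QI R)) 5) becomes
             (eq (reg:QI R) 5), since 5 is below GET_MODE_MASK (QImode),
             provided a QImode compare pattern exists.  */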
          break;

        case PLUS:
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        op1, XEXP (op0, 1))))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
            {
              op0 = XEXP (XEXP (op0, 0), 0);
              code = (code == LT ? EQ : NE);
              continue;
            }
          break;

        case MINUS:
          /* We used to optimize signed comparisons against zero, but that
             was incorrect.  Unsigned comparisons against zero (GTU, LEU)
             arrive here as equality comparisons, or (GEU, LTU) are
             optimized away.  No need to special-case them.  */

          /* (eq (minus A B) C) -> (eq A (plus B C)) or
             (eq B (minus A C)), whichever simplifies.  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        XEXP (op0, 0), op1)))
            {
              op0 = XEXP (op0, 1);
              op1 = tem;
              continue;
            }

          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
             of bits in X minus 1, is one iff X > 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
                 == mode_width - 1
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? LE : GT);
              continue;
            }
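
          /* Illustrative example (hypothetical SImode values, added for
             clarity): (lt (minus (ashiftrt X 31) X) 0) becomes (gt X 0),
             and the GE form becomes (le X 0).  */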
          break;

        case XOR:
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
             if C is zero or B is a constant.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (XOR, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
          break;

        case EQ:  case NE:
        case UNEQ:  case LTGT:
        case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
        case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
        case UNORDERED: case ORDERED:
          /* We can't do anything if OP0 is a condition code value, rather
             than an actual data value.  */
          if (const_op != 0
              || CC0_P (XEXP (op0, 0))
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
            break;

          /* Get the two operands being compared.  */
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
          else
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

          /* Check for the cases where we simply want the result of the
             earlier test or the opposite of that result.  */
          if (code == NE || code == EQ
              || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
                  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                  && (STORE_FLAG_VALUE
                      & (((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
                  && (code == LT || code == GE)))
            {
              enum rtx_code new_code;
              if (code == LT || code == NE)
                new_code = GET_CODE (op0);
              else
                new_code = reversed_comparison_code (op0, NULL);

              if (new_code != UNKNOWN)
                {
                  code = new_code;
                  op0 = tem;
                  op1 = tem1;
                  continue;
                }
            }
          break;

        case IOR:
          /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
             iff X <= 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? GT : LE);
              continue;
            }
          break;

        case AND:
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
             will be converted to a ZERO_EXTRACT later.  */
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
            {
              op0 = simplify_and_const_int
                (NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
                                                   XEXP (op0, 1),
                                                   XEXP (XEXP (op0, 0), 1)),
                 (HOST_WIDE_INT) 1);
              continue;
            }

          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
             zero and X is a comparison and C1 and C2 describe only bits set
             in STORE_FLAG_VALUE, we can compare with X.  */
          if (const_op == 0 && equality_comparison_p
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
            {
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
              if ((~STORE_FLAG_VALUE & mask) == 0
                  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
                          && COMPARISON_P (tem))))
                {
                  op0 = XEXP (XEXP (op0, 0), 0);
                  continue;
                }
            }

          /* If we are doing an equality comparison of an AND of a bit equal
             to the sign bit, replace this with a LT or GE comparison of
             the underlying value.  */
          if (equality_comparison_p
              && const_op == 0
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              op0 = XEXP (op0, 0);
              code = (code == EQ ? GE : LT);
              continue;
            }
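
          /* Illustrative example (hypothetical SImode values, added for
             clarity): (eq (and X 0x80000000) 0) becomes (ge X 0), and
             the NE form becomes (lt X 0).  */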

          /* If this AND operation is really a ZERO_EXTEND from a narrower
             mode, the constant fits within that mode, and this is either an
             equality or unsigned comparison, try to do this comparison in
             the narrower mode.

             Note that a transformation such as

             (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
             -> (ne:DI (reg:SI 4) (const_int 0))

             is invalid unless TRULY_NOOP_TRUNCATION allows it or the
             register is known to hold a value of the required mode.  */
          if ((equality_comparison_p || unsigned_comparison_p)
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
                                   & GET_MODE_MASK (mode))
                                  + 1)) >= 0
              && const_op >> i == 0
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
              && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
                                         GET_MODE_BITSIZE (GET_MODE (op0)))
                  || (REG_P (XEXP (op0, 0))
                      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
            {
              op0 = gen_lowpart (tmode, XEXP (op0, 0));
              continue;
            }

          /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
             fits in both M1 and M2 and the SUBREG is either paradoxical
             or represents the low part, permute the SUBREG and the AND
             and try again.  */
          if (GET_CODE (XEXP (op0, 0)) == SUBREG)
            {
              unsigned HOST_WIDE_INT c1;
              tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
              /* Require an integral mode, to avoid creating something like
                 (AND:SF ...).  */
              if (SCALAR_INT_MODE_P (tmode)
                  /* It is unsafe to commute the AND into the SUBREG if the
                     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
                     not defined.  As originally written the upper bits
                     have a defined value due to the AND operation.
                     However, if we commute the AND inside the SUBREG then
                     they no longer have defined values and the meaning of
                     the code has been changed.  */
                  && (0
#ifdef WORD_REGISTER_OPERATIONS
                      || (mode_width > GET_MODE_BITSIZE (tmode)
                          && mode_width <= BITS_PER_WORD)
#endif
                      || (mode_width <= GET_MODE_BITSIZE (tmode)
                          && subreg_lowpart_p (XEXP (op0, 0))))
                  && GET_CODE (XEXP (op0, 1)) == CONST_INT
                  && mode_width <= HOST_BITS_PER_WIDE_INT
                  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
                  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
                  && (c1 & ~GET_MODE_MASK (tmode)) == 0
                  && c1 != mask
                  && c1 != GET_MODE_MASK (tmode))
                {
                  op0 = simplify_gen_binary (AND, tmode,
                                             SUBREG_REG (XEXP (op0, 0)),
                                             gen_int_mode (c1, tmode));
                  op0 = gen_lowpart (mode, op0);
                  continue;
                }
            }

          /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
          if (const_op == 0 && equality_comparison_p
              && XEXP (op0, 1) == const1_rtx
              && GET_CODE (XEXP (op0, 0)) == NOT)
            {
              op0 = simplify_and_const_int
                (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
              code = (code == NE ? EQ : NE);
              continue;
            }

          /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
             (eq (and (lshiftrt X) 1) 0).
             Also handle the case where (not X) is expressed using xor.  */
          if (const_op == 0 && equality_comparison_p
              && XEXP (op0, 1) == const1_rtx
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
            {
              rtx shift_op = XEXP (XEXP (op0, 0), 0);
              rtx shift_count = XEXP (XEXP (op0, 0), 1);

              if (GET_CODE (shift_op) == NOT
                  || (GET_CODE (shift_op) == XOR
                      && GET_CODE (XEXP (shift_op, 1)) == CONST_INT
                      && GET_CODE (shift_count) == CONST_INT
                      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
                      && (INTVAL (XEXP (shift_op, 1))
                          == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
                {
                  op0 = simplify_and_const_int
                    (NULL_RTX, mode,
                     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
                     (HOST_WIDE_INT) 1);
                  code = (code == NE ? EQ : NE);
                  continue;
                }
            }
          break;
10443
 
10444
        case ASHIFT:
10445
          /* If we have (compare (ashift FOO N) (const_int C)) and
10446
             the high order N bits of FOO (N+1 if an inequality comparison)
10447
             are known to be zero, we can do this by comparing FOO with C
10448
             shifted right N bits so long as the low-order N bits of C are
10449
             zero.  */
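          /* Illustrative example (assuming 32-bit SImode; not from the
             original comment): (compare (ashift:SI X 2) (const_int 12))
             can become (compare X (const_int 3)), since 12 has its low
             2 bits clear and 12 >> 2 == 3.  */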
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) >= 0
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
                  < HOST_BITS_PER_WIDE_INT)
              && ((const_op
                   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & ~(mask >> (INTVAL (XEXP (op0, 1))
                               + ! equality_comparison_p))) == 0)
            {
              /* We must perform a logical shift, not an arithmetic one,
                 as we want the top N bits of C to be zero.  */
              unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);

              temp >>= INTVAL (XEXP (op0, 1));
              op1 = gen_int_mode (temp, mode);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are doing a sign bit comparison, it means we are testing
             a particular bit.  Convert it to the appropriate AND.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If this is an equality comparison with zero and we are shifting
             the low bit to the sign bit, we can convert this to an AND of the
             low-order bit.  */
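          /* Illustrative example (assuming 32-bit SImode):
             (eq (ashift X 31) 0) depends only on bit 0 of X, so it
             becomes (eq (and X 1) 0).  */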
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
                 == mode_width - 1)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            (HOST_WIDE_INT) 1);
              continue;
            }
          break;

        case ASHIFTRT:
          /* If this is an equality comparison with zero, we can do this
             as a logical shift, which might be much simpler.  */
          if (equality_comparison_p && const_op == 0
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
                                          XEXP (op0, 0),
                                          INTVAL (XEXP (op0, 1)));
              continue;
            }

          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
             do the comparison in a narrower mode.  */
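          /* Illustrative example (not from the original comment): a
             QImode value sign-extended to SImode is often written as
             (ashiftrt (ashift X 24) 24); if the constant fits in
             QImode, the comparison can be done on the QImode lowpart
             of X instead.  */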
          if (! unsigned_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
                                         MODE_INT, 1)) != BLKmode
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
                  <= GET_MODE_MASK (tmode)))
            {
              op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
              continue;
            }

          /* Likewise if OP0 is a PLUS of a sign extension with a
             constant, which is usually represented with the PLUS
             between the shifts.  */
          if (! unsigned_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
                                         MODE_INT, 1)) != BLKmode
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
                  <= GET_MODE_MASK (tmode)))
            {
              rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
              rtx add_const = XEXP (XEXP (op0, 0), 1);
              rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
                                                   add_const, XEXP (op0, 1));

              op0 = simplify_gen_binary (PLUS, tmode,
                                         gen_lowpart (tmode, inner),
                                         new_const);
              continue;
            }

          /* ... fall through ...  */
        case LSHIFTRT:
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
             the low order N bits of FOO are known to be zero, we can do this
             by comparing FOO with C shifted left N bits so long as no
             overflow occurs.  */
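          /* Illustrative example (not from the original comment):
             (compare (lshiftrt:SI X 2) (const_int 5)) can become
             (compare X (const_int 20)) when the low 2 bits of X are
             known zero; for a logical shift the condition is also made
             unsigned below.  */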
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) >= 0
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_CODE (op0) != LSHIFTRT
                      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
                         + 1)
                      : 0))
                  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
            {
              /* If the shift was logical, then we must make the condition
                 unsigned.  */
              if (GET_CODE (op0) == LSHIFTRT)
                code = unsigned_condition (code);

              const_op <<= INTVAL (XEXP (op0, 1));
              op1 = GEN_INT (const_op);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are using this shift to extract just the sign bit, we
             can replace this with an LT or GE comparison.  */
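          /* Illustrative example (assuming 32-bit SImode):
             (ne (lshiftrt X 31) 0) is true exactly when the sign bit
             of X is set, so it becomes (lt X 0).  */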
          if (const_op == 0
              && (equality_comparison_p || sign_bit_comparison_p)
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
                 == mode_width - 1)
            {
              op0 = XEXP (op0, 0);
              code = (code == NE || code == GT ? LT : GE);
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that is not doing anything or is
     paradoxical.  The latter transformation must only be performed when
     it is known that the "extra" bits will be the same in op0 and op1 or
     that they don't matter.  There are three cases to consider:

     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
     care bits and we can assume they have any convenient value.  So
     making the transformation is safe.

     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
     In this case the upper bits of op0 are undefined.  We should not make
     the simplification in that case as we do not know the contents of
     those bits.

     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
     UNKNOWN.  In that case we know those bits are zeros or ones.  We must
     also be sure that they are the same as the upper bits of op1.

     We can never remove a SUBREG for a non-equality comparison because
     the sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);

  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
      && (code == NE || code == EQ))
    {
      if (GET_MODE_SIZE (GET_MODE (op0))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
        {
          /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
             implemented.  */
          if (REG_P (SUBREG_REG (op0)))
            {
              op0 = SUBREG_REG (op0);
              op1 = gen_lowpart (GET_MODE (op0), op1);
            }
        }
      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
                <= HOST_BITS_PER_WIDE_INT)
               && (nonzero_bits (SUBREG_REG (op0),
                                 GET_MODE (SUBREG_REG (op0)))
                   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
        {
          tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);

          if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
               & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
            op0 = SUBREG_REG (op0), op1 = tem;
        }
    }

  /* We now do the opposite procedure: Some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && ! have_insn_for (COMPARE, mode))
    for (tmode = GET_MODE_WIDER_MODE (mode);
         (tmode != VOIDmode
          && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
         tmode = GET_MODE_WIDER_MODE (tmode))
      if (have_insn_for (COMPARE, tmode))
        {
          int zero_extended;

          /* If the only nonzero bits in OP0 and OP1 are those in the
             narrower mode and this is an equality or unsigned comparison,
             we can use the wider mode.  Similarly for sign-extended
             values, in which case it is true for all comparisons.  */
          zero_extended = ((code == EQ || code == NE
                            || code == GEU || code == GTU
                            || code == LEU || code == LTU)
                           && (nonzero_bits (op0, tmode)
                               & ~GET_MODE_MASK (mode)) == 0
                           && ((GET_CODE (op1) == CONST_INT
                                || (nonzero_bits (op1, tmode)
                                    & ~GET_MODE_MASK (mode)) == 0)));

          if (zero_extended
              || ((num_sign_bit_copies (op0, tmode)
                   > (unsigned int) (GET_MODE_BITSIZE (tmode)
                                     - GET_MODE_BITSIZE (mode)))
                  && (num_sign_bit_copies (op1, tmode)
                      > (unsigned int) (GET_MODE_BITSIZE (tmode)
                                        - GET_MODE_BITSIZE (mode)))))
            {
              /* If OP0 is an AND and we don't have an AND in MODE either,
                 make a new AND in the proper mode.  */
              if (GET_CODE (op0) == AND
                  && !have_insn_for (AND, mode))
                op0 = simplify_gen_binary (AND, tmode,
                                           gen_lowpart (tmode,
                                                        XEXP (op0, 0)),
                                           gen_lowpart (tmode,
                                                        XEXP (op0, 1)));

              op0 = gen_lowpart (tmode, op0);
              if (zero_extended && GET_CODE (op1) == CONST_INT)
                op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode));
              op1 = gen_lowpart (tmode, op1);
              break;
            }

          /* If this is a test for negative, we can make an explicit
             test of the sign bit.  */

          if (op1 == const0_rtx && (code == LT || code == GE)
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_gen_binary (AND, tmode,
                                         gen_lowpart (tmode, op0),
                                         GEN_INT ((HOST_WIDE_INT) 1
                                                  << (GET_MODE_BITSIZE (mode)
                                                      - 1)));
              code = (code == LT) ? NE : EQ;
              break;
            }
        }

#ifdef CANONICALIZE_COMPARISON
  /* If this machine only supports a subset of valid comparisons, see if we
     can convert an unsupported one into a supported one.  */
  CANONICALIZE_COMPARISON (code, op0, op1);
#endif

  *pop0 = op0;
  *pop1 = op1;

  return code;
}

/* Utility function for record_value_for_reg.  Count the number of
   rtxs in X.  */
static int
count_rtxs (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, ret = 1;

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (x0 == x1)
        return 1 + 2 * count_rtxs (x0);

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return 2 + 2 * count_rtxs (x0)
               + count_rtxs (x0 == XEXP (x1, 0)
                             ? XEXP (x1, 1) : XEXP (x1, 0));

      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return 2 + 2 * count_rtxs (x1)
               + count_rtxs (x1 == XEXP (x0, 0)
                             ? XEXP (x0, 1) : XEXP (x0, 0));
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      ret += count_rtxs (XEXP (x, i));

  return ret;
}
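
/* Illustrative note (an example, not part of the original comments):
   for (plus (reg 1) (reg 2)) the result is 3, the PLUS plus its two
   operands.  A shared operand, as in (plus (reg 1) (reg 1)), is
   deliberately counted twice, reflecting how large the expression
   becomes once record_value_for_reg substitutes a value for each
   occurrence.  */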

/* Utility function for the following routine.  Called when X is part of
   a value being stored into last_set_value.  Sets last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */

static void
update_table_tick (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i;

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
        = regno + (regno < FIRST_PSEUDO_REGISTER
                   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      unsigned int r;

      for (r = regno; r < endregno; r++)
        reg_stat[r].last_set_table_tick = label_tick;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      {
        /* Check for identical subexpressions.  If x contains
           identical subexpressions we only have to traverse one of
           them.  */
        if (i == 0 && ARITHMETIC_P (x))
          {
            /* Note that at this point x1 has already been
               processed.  */
            rtx x0 = XEXP (x, 0);
            rtx x1 = XEXP (x, 1);

            /* If x0 and x1 are identical then there is no need to
               process x0.  */
            if (x0 == x1)
              break;

            /* If x0 is identical to a subexpression of x1 then while
               processing x1, x0 has already been processed.  Thus we
               are done with x.  */
            if (ARITHMETIC_P (x1)
                && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
              break;

            /* If x1 is identical to a subexpression of x0 then we
               still have to process the rest of x0.  */
            if (ARITHMETIC_P (x0)
                && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
              {
                update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
                break;
              }
          }

        update_table_tick (XEXP (x, i));
      }
}

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_stat[].last_set; this is
   only permitted with VALUE also zero and is used to invalidate the
   register.  */

static void
record_value_for_reg (rtx reg, rtx insn, rtx value)
{
  unsigned int regno = REGNO (reg);
  unsigned int endregno
    = regno + (regno < FIRST_PSEUDO_REGISTER
               ? hard_regno_nregs[regno][GET_MODE (reg)] : 1);
  unsigned int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
         our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      /* If TEM is simply a binary operation with two CLOBBERs as operands,
         it isn't going to be useful and will take a lot of time to process,
         so just use the CLOBBER.  */

      if (tem)
        {
          if (ARITHMETIC_P (tem)
              && GET_CODE (XEXP (tem, 0)) == CLOBBER
              && GET_CODE (XEXP (tem, 1)) == CLOBBER)
            tem = XEXP (tem, 0);
          else if (count_occurrences (value, reg, 1) >= 2)
            {
              /* If there are two or more occurrences of REG in VALUE,
                 prevent the value from growing too much.  */
              if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
                tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
            }

          value = replace_rtx (copy_rtx (value), reg, tem);
        }
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      if (insn)
        reg_stat[i].last_set = insn;

      reg_stat[i].last_set_value = 0;
      reg_stat[i].last_set_mode = 0;
      reg_stat[i].last_set_nonzero_bits = 0;
      reg_stat[i].last_set_sign_bit_copies = 0;
      reg_stat[i].last_death = 0;
      reg_stat[i].truncated_to_mode = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_stat[i].last_set_label = label_tick;
      if (!insn || (value && reg_stat[i].last_set_table_tick == label_tick))
        reg_stat[i].last_set_invalid = 1;
      else
        reg_stat[i].last_set_invalid = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value, insn,
                                          reg_stat[regno].last_set_label, 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, insn,
                                     reg_stat[regno].last_set_label, 1))
        value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_stat[regno].last_set_value = value;

  if (value)
    {
      enum machine_mode mode = GET_MODE (reg);
      subst_low_cuid = INSN_CUID (insn);
      reg_stat[regno].last_set_mode = mode;
      if (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        mode = nonzero_bits_mode;
      reg_stat[regno].last_set_nonzero_bits = nonzero_bits (value, mode);
      reg_stat[regno].last_set_sign_bit_copies
        = num_sign_bit_copies (value, GET_MODE (reg));
    }
}
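
/* Illustrative note (an example, not part of the original comments):
   for an insn doing, say, r100 = r100 + 4, VALUE mentions REG itself.
   The code above substitutes the previous known value of r100 into
   VALUE when one is available; failing that, the get_last_value_validate
   call near the end replaces the self-reference with
   (clobber (const_int 0)) so last_set_value never chases its own
   tail.  */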

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  DATA is the instruction in which the
   set is occurring.  */

static void
record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
{
  rtx record_dead_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (!record_dead_insn)
    {
      if (REG_P (dest))
        record_value_for_reg (dest, NULL_RTX, NULL_RTX);
      return;
    }

  if (REG_P (dest))
    {
      /* If we are setting the whole register, we know its value.  Otherwise
         show that we don't know the value.  We can handle SUBREG in
         some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
               && GET_CODE (SET_DEST (setter)) == SUBREG
               && SUBREG_REG (SET_DEST (setter)) == dest
               && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
               && subreg_lowpart_p (SET_DEST (setter)))
        record_value_for_reg (dest, record_dead_insn,
                              gen_lowpart (GET_MODE (dest),
                                           SET_SRC (setter)));
      else
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (MEM_P (dest)
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_stat[], in particular fields last_set, last_set_value,
   last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
   last_death, and also the similar information mem_last_set (which insn
   most recently modified memory) and last_call_cuid (which insn was the
   most recent subroutine call).  */

static void
record_dead_and_set_regs (rtx insn)
{
  rtx link;
  unsigned int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
          && REG_P (XEXP (link, 0)))
        {
          unsigned int regno = REGNO (XEXP (link, 0));
          unsigned int endregno
            = regno + (regno < FIRST_PSEUDO_REGISTER
                       ? hard_regno_nregs[regno][GET_MODE (XEXP (link, 0))]
                       : 1);

          for (i = regno; i < endregno; i++)
            reg_stat[i].last_death = insn;
        }
      else if (REG_NOTE_KIND (link) == REG_INC)
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          {
            reg_stat[i].last_set_value = 0;
            reg_stat[i].last_set_mode = 0;
            reg_stat[i].last_set_nonzero_bits = 0;
            reg_stat[i].last_set_sign_bit_copies = 0;
            reg_stat[i].last_death = 0;
            reg_stat[i].truncated_to_mode = 0;
          }

      last_call_cuid = mem_last_set = INSN_CUID (insn);

      /* We can't combine into a call pattern.  Remember, though, that
         the return value register is set at this CUID.  We could
         still replace a register with the return value from the
         wrong subroutine call!  */
      note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
    }
  else
    note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
}

/* If a SUBREG has the promoted bit set, it is in fact a property of the
   register present in the SUBREG, so for each such SUBREG go back and
   adjust nonzero and sign bit information of the registers that are
   known to have some zero/sign bits set.

   This is needed because when combine blows the SUBREGs away, the
   information on zero/sign bits is lost and further combines can be
   missed because of that.  */

static void
record_promoted_value (rtx insn, rtx subreg)
{
  rtx links, set;
  unsigned int regno = REGNO (SUBREG_REG (subreg));
  enum machine_mode mode = GET_MODE (subreg);

  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
    return;

  for (links = LOG_LINKS (insn); links;)
    {
      insn = XEXP (links, 0);
      set = single_set (insn);

      if (! set || !REG_P (SET_DEST (set))
          || REGNO (SET_DEST (set)) != regno
          || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
        {
          links = XEXP (links, 1);
          continue;
        }

      if (reg_stat[regno].last_set == insn)
        {
          if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
            reg_stat[regno].last_set_nonzero_bits &= GET_MODE_MASK (mode);
        }

      if (REG_P (SET_SRC (set)))
        {
          regno = REGNO (SET_SRC (set));
          links = LOG_LINKS (insn);
        }
      else
        break;
    }
}

/* Check if X, a register, is known to contain a value already
   truncated to MODE.  In this case we can use a subreg to refer to
   the truncated value even though in the generic case we would need
   an explicit truncation.  */

static bool
reg_truncated_to_mode (enum machine_mode mode, rtx x)
{
  enum machine_mode truncated = reg_stat[REGNO (x)].truncated_to_mode;

  if (truncated == 0 || reg_stat[REGNO (x)].truncation_label != label_tick)
    return false;
  if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
    return true;
  if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                             GET_MODE_BITSIZE (truncated)))
    return true;
  return false;
}
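
/* Illustrative note (a target-dependent example, not from the original
   comments): on a machine where truncating SImode to HImode is not a
   no-op, once a lowpart (subreg:HI (reg:SI 100) 0) has been recorded by
   record_truncated_value below, this predicate returns true for that
   register and HImode within the same label_tick, letting combine use a
   subreg where it would otherwise need an explicit truncation.  */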

/* X is a REG or a SUBREG.  If X is some sort of a truncation, record
   it.  For non-TRULY_NOOP_TRUNCATION targets we might be able to turn
   a truncate into a subreg using this information.  */

static void
record_truncated_value (rtx x)
{
  enum machine_mode truncated_mode;

  if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
    {
      enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
      truncated_mode = GET_MODE (x);

      if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
        return;

      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
                                 GET_MODE_BITSIZE (original_mode)))
        return;

      x = SUBREG_REG (x);
    }
  /* ??? For hard-regs we now record everything.  We might be able to
     optimize this using last_set_mode.  */
  else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    truncated_mode = GET_MODE (x);
  else
    return;

  if (reg_stat[REGNO (x)].truncated_to_mode == 0
      || reg_stat[REGNO (x)].truncation_label < label_tick
      || (GET_MODE_SIZE (truncated_mode)
          < GET_MODE_SIZE (reg_stat[REGNO (x)].truncated_to_mode)))
    {
      reg_stat[REGNO (x)].truncated_to_mode = truncated_mode;
      reg_stat[REGNO (x)].truncation_label = label_tick;
    }
}

/* Scan X for promoted SUBREGs and truncated REGs.  For each one
   found, note what it implies to the registers used in it.  */

static void
check_conversions (rtx insn, rtx x)
{
  if (GET_CODE (x) == SUBREG || REG_P (x))
    {
      if (GET_CODE (x) == SUBREG
          && SUBREG_PROMOTED_VAR_P (x)
          && REG_P (SUBREG_REG (x)))
        record_promoted_value (insn, x);

      record_truncated_value (x);
    }
  else
    {
      const char *format = GET_RTX_FORMAT (GET_CODE (x));
      int i, j;

      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        switch (format[i])
          {
          case 'e':
            check_conversions (insn, XEXP (x, i));
            break;
          case 'V':
          case 'E':
            if (XVEC (x, i) != 0)
              for (j = 0; j < XVECLEN (x, i); j++)
                check_conversions (insn, XVECEXP (x, i, j));
            break;
          }
    }
}

/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is nonzero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
        = regno + (regno < FIRST_PSEUDO_REGISTER
                   ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      unsigned int j;

      for (j = regno; j < endregno; j++)
        if (reg_stat[j].last_set_invalid
            /* If this is a pseudo-register that was only set once and not
               live at the beginning of the function, it is always valid.  */
            || (! (regno >= FIRST_PSEUDO_REGISTER
                   && REG_N_SETS (regno) == 1
                   && (! REGNO_REG_SET_P
                       (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
                        regno)))
                && reg_stat[j].last_set_label > tick))
          {
            if (replace)
              *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
            return replace;
          }

      return 1;
    }
  /* If this is a memory reference, make sure that there were
     no stores after it that might have clobbered the value.  We don't
     have alias info, so we assume any store invalidates it.  */
  else if (MEM_P (x) && !MEM_READONLY_P (x)
           && INSN_CUID (insn) <= mem_last_set)
    {
      if (replace)
        *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          /* Check for identical subexpressions.  If x contains
             identical subexpressions we only have to traverse one of
             them.  */
          if (i == 1 && ARITHMETIC_P (x))
            {
              /* Note that at this point x0 has already been checked
                 and found valid.  */
              rtx x0 = XEXP (x, 0);
              rtx x1 = XEXP (x, 1);

              /* If x0 and x1 are identical then x is also valid.  */
              if (x0 == x1)
                return 1;

              /* If x1 is identical to a subexpression of x0 then
                 while checking x0, x1 has already been checked.  Thus
                 it is valid and so is x.  */
              if (ARITHMETIC_P (x0)
                  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
                return 1;

              /* If x0 is identical to a subexpression of x1 then x is
                 valid iff the rest of x1 is valid.  */
              if (ARITHMETIC_P (x1)
                  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
                return
                  get_last_value_validate (&XEXP (x1,
                                                  x0 == XEXP (x1, 0) ? 1 : 0),
                                           insn, tick, replace);
            }

          if (get_last_value_validate (&XEXP (x, i), insn, tick,
                                       replace) == 0)
            return 0;
        }
      /* Don't bother with these.  They shouldn't occur anyway.  */
      else if (fmt[i] == 'E')
        return 0;
    }

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (rtx x)
{
  unsigned int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart (GET_MODE (x), value);

  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  value = reg_stat[regno].last_set_value;

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or live
     at the beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (reg_stat[regno].last_set_label != label_tick
          && (regno < FIRST_PSEUDO_REGISTER
              || REG_N_SETS (regno) != 1
              || (REGNO_REG_SET_P
                  (ENTRY_BLOCK_PTR->next_bb->il.rtl->global_live_at_start,
                   regno)))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (INSN_CUID (reg_stat[regno].last_set) >= subst_low_cuid)
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_stat[regno].last_set,
                               reg_stat[regno].last_set_label, 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_stat[regno].last_set,
                               reg_stat[regno].last_set_label, 1))
    return value;

  return 0;
}

/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (rtx x, int from_cuid)
{
  const char *fmt;
  int i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER
                                 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
         because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
        return 1;
#endif
      for (; regno < endreg; regno++)
        if (reg_stat[regno].last_set
            && INSN_CUID (reg_stat[regno].last_set) > from_cuid)
          return 1;
      return 0;
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_cuid))
        return 1;
    }
  return 0;
}
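
/* Illustrative note (an example, not from the original comments): if,
   say, insn 10 uses (plus r5 r6) and a later insn 12 sets r6, then
   use_crosses_set_p on that PLUS with insn 10's cuid returns 1; moving
   the addition past insn 12 would read the wrong value of r6.  */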

/* Define three variables used for communication between the following
   routines.  */

static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno, endregno;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[regno][GET_MODE (dest)] : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return nonzero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (rtx reg, rtx insn)
{
  basic_block block;
  unsigned int i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
                                        ? hard_regno_nregs[reg_dead_regno]
                                                          [GET_MODE (reg)]
                                        : 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
     we allow the machine description to decide whether use-and-clobber
     patterns are OK.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
        if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
          return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
      if (reg_dead_flag)
        return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
        return 1;
    }

  /* Get the basic block that we were in.  */
  if (insn == 0)
    block = ENTRY_BLOCK_PTR->next_bb;
  else
    {
      FOR_EACH_BB (block)
        if (insn == BB_HEAD (block))
          break;

      if (block == EXIT_BLOCK_PTR)
        return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (block->il.rtl->global_live_at_start, i))
      return 0;

  return 1;
}

/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (MEM_P (XEXP (x, 0)))
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int endregno, r;

          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
          for (r = regno; r < endregno; r++)
            SET_HARD_REG_BIT (newpat_used_regs, r);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (MEM_P (testreg))
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}

/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      REG_N_DEATHS (regno)--;
      remove_note (insn, note);
    }

  return note;
}
11643
 
11644
/* For each register (hardware or pseudo) used within expression X, if its
11645
   death is in an instruction with cuid between FROM_CUID (inclusive) and
11646
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
11647
   list headed by PNOTES.
11648
 
11649
   That said, don't move registers killed by maybe_kill_insn.
11650
 
11651
   This is done when X is being merged by combination into TO_INSN.  These
11652
   notes will then be distributed as needed.  */
11653
 
11654
static void
11655
move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
11656
             rtx *pnotes)
11657
{
11658
  const char *fmt;
11659
  int len, i;
11660
  enum rtx_code code = GET_CODE (x);
11661
 
11662
  if (code == REG)
11663
    {
11664
      unsigned int regno = REGNO (x);
11665
      rtx where_dead = reg_stat[regno].last_death;
11666
      rtx before_dead, after_dead;
11667
 
11668
      /* Don't move the register if it gets killed in between from and to.  */
11669
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11670
          && ! reg_referenced_p (x, maybe_kill_insn))
11671
        return;
11672
 
11673
      /* WHERE_DEAD could be a USE insn made by combine, so first we
11674
         make sure that we have insns with valid INSN_CUID values.  */
11675
      before_dead = where_dead;
11676
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11677
        before_dead = PREV_INSN (before_dead);
11678
 
11679
      after_dead = where_dead;
11680
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11681
        after_dead = NEXT_INSN (after_dead);
11682
 
11683
      if (before_dead && after_dead
11684
          && INSN_CUID (before_dead) >= from_cuid
11685
          && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11686
              || (where_dead != after_dead
11687
                  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11688
        {
11689
          rtx note = remove_death (regno, where_dead);
11690
 
11691
          /* It is possible for the call above to return 0.  This can occur
11692
             when last_death points to I2 or I1 that we combined with.
11693
             In that case make a new note.
11694
 
11695
             We must also check for the case where X is a hard register
11696
             and NOTE is a death note for a range of hard registers
11697
             including X.  In that case, we must put REG_DEAD notes for
11698
             the remaining registers in place of NOTE.  */
11699
 
11700
          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11701
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11702
                  > GET_MODE_SIZE (GET_MODE (x))))
11703
            {
11704
              unsigned int deadregno = REGNO (XEXP (note, 0));
11705
              unsigned int deadend
11706
                = (deadregno + hard_regno_nregs[deadregno]
11707
                                               [GET_MODE (XEXP (note, 0))]);
11708
              unsigned int ourend
11709
                = regno + hard_regno_nregs[regno][GET_MODE (x)];
11710
              unsigned int i;
11711
 
11712
              for (i = deadregno; i < deadend; i++)
11713
                if (i < regno || i >= ourend)
11714
                  REG_NOTES (where_dead)
11715
                    = gen_rtx_EXPR_LIST (REG_DEAD,
11716
                                         regno_reg_rtx[i],
11717
                                         REG_NOTES (where_dead));
11718
            }
11719
 
11720
          /* If we didn't find any note, or if we found a REG_DEAD note that
11721
             covers only part of the given reg, and we have a multi-reg hard
11722
             register, then to be safe we must check for REG_DEAD notes
11723
             for each register other than the first.  They could have
11724
             their own REG_DEAD notes lying around.  */
11725
          else if ((note == 0
11726
                    || (note != 0
11727
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11728
                            < GET_MODE_SIZE (GET_MODE (x)))))
11729
                   && regno < FIRST_PSEUDO_REGISTER
11730
                   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
11731
            {
11732
              unsigned int ourend
11733
                = regno + hard_regno_nregs[regno][GET_MODE (x)];
11734
              unsigned int i, offset;
11735
              rtx oldnotes = 0;
11736
 
11737
              if (note)
11738
                offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
11739
              else
11740
                offset = 1;
11741
 
11742
              for (i = regno + offset; i < ourend; i++)
11743
                move_deaths (regno_reg_rtx[i],
11744
                             maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11745
            }
11746
 
11747
          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11748
            {
11749
              XEXP (note, 1) = *pnotes;
11750
              *pnotes = note;
11751
            }
11752
          else
11753
            *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11754
 
11755
          REG_N_DEATHS (regno)++;
11756
        }
11757
 
11758
      return;
11759
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
         that accesses one word of a multi-word item, some piece of every
         register in the expression is used by this insn, so remove any
         old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}

/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (!REG_P (target))
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = tregno + hard_regno_nregs[tregno][GET_MODE (target)];
      endregno = regno + hard_regno_nregs[regno][GET_MODE (x)];
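
      /* The ranges [regno, endregno) and [tregno, endtregno) are half-open;
         the registers overlap exactly when each range starts before the
         other ends.  */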
      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}

/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from combining the
   insns that included FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
                  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_VALUE_PROFILE:
          /* Just get rid of this note, as it is unused later anyway.  */
          break;

        case REG_NON_LOCAL_GOTO:
          if (JUMP_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && JUMP_P (i2));
              place = i2;
            }
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (CALL_P (i3))
            place = i3;
          else if (i2 && CALL_P (i2))
            place = i2;
          else
            {
              gcc_assert (flag_non_call_exceptions);
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (CALL_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && CALL_P (i2));
              place = i2;
            }
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if they are for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (REG_P (XEXP (note, 0))
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (REG_P (XEXP (note, 0))
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
        case REG_NO_CONFLICT:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* Don't attach a REG_LABEL note to a JUMP_INSN.  Add
             a JUMP_LABEL instead or decrement LABEL_NUSES.  */
          if (place && JUMP_P (place))
            {
              rtx label = JUMP_LABEL (place);

              if (!label)
                JUMP_LABEL (place) = XEXP (note, 0);
              else
                {
                  gcc_assert (label == XEXP (note, 0));
                  if (LABEL_P (label))
                    LABEL_NUSES (label)--;
                }
              place = 0;
            }
          if (place2 && JUMP_P (place2))
            {
              rtx label = JUMP_LABEL (place2);

              if (!label)
                JUMP_LABEL (place2) = XEXP (note, 0);
              else
                {
                  gcc_assert (label == XEXP (note, 0));
                  if (LABEL_P (label))
                    LABEL_NUSES (label)--;
                }
              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;

        case REG_RETVAL:
          /* If the insn previously containing this note still exists,
             put it back where it was.  Otherwise move it to the previous
             insn.  Adjust the corresponding REG_LIBCALL note.  */
          if (!NOTE_P (from_insn))
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
              place = prev_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_RETVAL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_LIBCALL:
          /* This is handled similarly to REG_RETVAL.  */
          if (!NOTE_P (from_insn))
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
              place = next_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
              /* If we're deleting the last remaining instruction of a
                 libcall sequence, don't add the notes.  */
              else if (XEXP (note, 0) == from_insn)
                tem = place = 0;
              /* Don't add the dangling REG_LIBCALL note.  */
              else if (! tem)
                place = 0;
            }
          break;

        case REG_DEAD:
          /* If we replaced the right hand side of FROM_INSN with a
             REG_EQUAL note, the original use of the dying register
             will not have been combined into I3 and I2.  In such cases,
             FROM_INSN is guaranteed to be the first of the combined
             instructions, so we simply need to search back before
             FROM_INSN for the previous use or set of this register,
             then alter the notes there appropriately.

             If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */
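
          /* The I2MOD test below recognizes the REG_EQUAL replacement case
             described above; when it applies, the backward search starts
             from FROM_INSN itself rather than from I3.  */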
          if (from_insn
              && from_insn == i2mod
              && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
            tem = from_insn;
          else
            {
              if (from_insn
                  && CALL_P (from_insn)
                  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
                place = from_insn;
              else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
                place = i3;
              else if (i2 != 0 && next_nonnote_insn (i2) == i3
                       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                place = i2;
              else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
                        && !(i2mod
                             && reg_overlap_mentioned_p (XEXP (note, 0),
                                                         i2mod_old_rhs)))
                       || rtx_equal_p (XEXP (note, 0), elim_i1))
                break;
              tem = i3;
            }

          if (place == 0)
            {
              basic_block bb = this_basic_block;
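
              /* Scan backwards from the chosen starting insn, stopping at
                 the head of the current basic block, for a place to record
                 the death.  */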
              for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
                {
                  if (! INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX,
                                            NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          SET_INSN_DELETED (tem);

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX,
                                                NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              SET_INSN_DELETED (cc0_setter);
                            }
#endif
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /*  If there isn't already a REG_UNUSED note, put one
                              here.  Do not place a REG_DEAD note, even if
                              the register is also used here; that would not
                              match the algorithm used in lifetime analysis
                              and can cause the consistency check in the
                              scheduler to fail.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (CALL_P (tem)
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && INSN_UID (place) <= max_uid_cuid
                          && INSN_CUID (place) > INSN_CUID (i2)
                          && from_insn
                          && INSN_CUID (from_insn) > INSN_CUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }

              /* We haven't found an insn for the death note and it
                 is still a REG_DEAD note, but we have hit the beginning
                 of the block.  If the existing life info says the reg
                 was dead, there's nothing left to do.  Otherwise, we'll
                 need to do a global life update after combine.  */
              if (REG_NOTE_KIND (note) == REG_DEAD && place == 0
                  && REGNO_REG_SET_P (bb->il.rtl->global_live_at_start,
                                      REGNO (XEXP (note, 0))))
                SET_BIT (refresh_blocks, this_basic_block->index);
            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here if it is set at all, not if it is totally replaced,
             which is what `dead_or_set_p' checks, so also check for it being
             set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));

              /* Similarly, if the instruction on which we want to place
                 the note is a noop, we'll need to do a global live update
                 after we remove them in delete_noop_moves.  */
              if (noop_move_p (place))
                SET_BIT (refresh_blocks, this_basic_block->index);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     last_death.  [I no longer understand why this is
                     being done.] */
                  if (reg_stat[regno].last_death != place)
                    reg_stat[regno].last_death = 0;
                  place = 0;
                }
              else
                reg_stat[regno].last_death = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
                {
                  unsigned int endregno
                    = regno + hard_regno_nregs[regno]
                                              [GET_MODE (XEXP (note, 0))];
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += hard_regno_nregs[i][reg_raw_mode[i]])
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note
                                = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (! INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      {
                                        SET_BIT (refresh_blocks,
                                                 this_basic_block->index);
                                        break;
                                      }
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    REG_NOTES (tem)
                                      = gen_rtx_EXPR_LIST (REG_UNUSED, piece,
                                                           REG_NOTES (tem));
                                    break;
                                  }
                              }

                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          gcc_unreachable ();
        }
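
      /* Attach the note to the insn chosen above: REG_NOTES is singly
         linked through XEXP (note, 1), so this prepends NOTE to PLACE's
         note list.  */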
      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && REG_P (XEXP (note, 0)))
        REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && REG_P (XEXP (note, 0)))
            REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

          REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
                                               REG_NOTE_KIND (note),
                                               XEXP (note, 0),
                                               REG_NOTES (place2));
        }
    }
}

/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */

static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only way this could happen is if
         we were to replace I3, I2, and I1 by I3 and I2.  But in that case
         the destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */
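
      /* When this is the function's last basic block, the walk below runs
         to the end of the insn stream; otherwise it stops at the first
         insn of the following block.  */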
      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (CALL_P (insn)
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}

/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */

static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}

/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (rtx insn)
{
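  /* Insns created by combine have UIDs above max_uid_cuid and no CUID of
     their own.  Skip over any such USE insns and take the CUID of the
     first original insn that follows.  */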
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  gcc_assert (INSN_UID (insn) <= max_uid_cuid);

  return INSN_CUID (insn);
}

void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}


static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static unsigned int
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      timevar_pop (TV_JUMP);

      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_UPDATE_LIFE);
    }
  return 0;
}

struct tree_opt_pass pass_combine =
{
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'c'                                   /* letter */
};
