/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011, 2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0; none are needed,
   because the insn that sets CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having
   a logical link to the preceding insn.  The same is true for an
   insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
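
/* As an illustrative sketch (the RTL below is hypothetical, not taken
   from any particular target): given the linked pair

       (set (reg:SI 100) (const_int 4))
       (set (reg:SI 101) (plus:SI (reg:SI 99) (reg:SI 100)))

   substituting the first SET's source into the second would yield

       (set (reg:SI 101) (plus:SI (reg:SI 99) (const_int 4)))

   which replaces both insns if the machine description recognizes an
   add-immediate pattern; otherwise the change is undone.  */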

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "cgraph.h"
#include "obstack.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;

typedef struct reg_stat_struct {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx                           last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx                           last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */
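
  /* For instance (an illustrative sketch, not tied to any target): if
     register n was last loaded by a byte load on a machine where byte
     loads zero extend, last_set_nonzero_bits records that only the low
     8 bits can be nonzero, so a later (and (reg n) (const_int 255))
     adds nothing and can be simplified away.  */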

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx                           last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int                           last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int                           last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT        last_set_nonzero_bits;
  char                          last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode)   last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char                          last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char                 sign_bit_copies;

  unsigned HOST_WIDE_INT        nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int                           truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode)   truncated_to_mode : 8;
} reg_stat_type;

DEF_VEC_O(reg_stat_type);
DEF_VEC_ALLOC_O(reg_stat_type,heap);

static VEC(reg_stat_type,heap) *reg_stat;

/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;


/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as struct insn_link pointers.  */

struct insn_link {
  rtx insn;
  struct insn_link *next;
};

static struct insn_link **uid_log_links;

#define INSN_COST(INSN)         (uid_insn_cost[INSN_UID (INSN)])
#define LOG_LINKS(INSN)         (uid_log_links[INSN_UID (INSN)])

#define FOR_EACH_LOG_LINK(L, INSN)                              \
  for ((L) = LOG_LINKS (INSN); (L); (L) = (L)->next)
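
/* Usage sketch (note_feeding_insn is a placeholder for whatever the
   caller does with each link):

       struct insn_link *l;
       FOR_EACH_LOG_LINK (l, insn)
         note_feeding_insn (l->insn);

   Each l->insn is an earlier insn in the same basic block whose SET
   feeds INSN.  */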

/* Links for LOG_LINKS are allocated from this obstack.  */

static struct obstack insn_link_obstack;

/* Allocate a link.  */

static inline struct insn_link *
alloc_insn_link (rtx insn, struct insn_link *next)
{
  struct insn_link *l
    = (struct insn_link *) obstack_alloc (&insn_link_obstack,
                                          sizeof (struct insn_link));
  l->insn = insn;
  l->next = next;
  return l;
}

/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each extended basic block in scanning order.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;


/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE, UNDO_LINKS };

struct undo
{
  struct undo *next;
  enum undo_kind kind;
  union { rtx r; int i; enum machine_mode m; struct insn_link *l; } old_contents;
  union { rtx *r; int *i; struct insn_link **l; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, rtx, int, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, rtx, int *, rtx);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx, bool);
static rtx subst (rtx, rtx, rtx, int, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_compare_const (enum rtx_code, rtx, rtx *);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (const_rtx);
static int use_crosses_set_p (const_rtx, int);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (struct insn_link *);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static int record_truncated_value (rtx *, void *);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);


/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN cannot be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx
combine_split_insns (rtx pattern, rtx insn)
{
  rtx ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > VEC_length (reg_stat_type, reg_stat))
    VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
  return ret;
}

/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         just need to check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == NULL)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return NULL;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == NULL)
                result = this_result;
              else if (this_result)
                return NULL;
            }
        }
    }

  return result;
}


/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  basic_block bb;
  rtx next;
  rtx *result;
  struct insn_link *link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (!REG_P (dest))
    return 0;

  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        FOR_EACH_LOG_LINK (link, next)
          if (link->insn == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && CONST_INT_P (newval))
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && CONST_INT_P (SUBREG_REG (oldval))));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && CONST_INT_P (XEXP (oldval, 0))));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)     do_SUBST(&(INTO), (NEWVAL))
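
/* Usage sketch: SUBST (SET_SRC (set), new_src) overwrites the source of
   SET in place while recording the old value on undobuf.undos, so that
   undo_all can restore it if the combination attempt fails.  */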

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))

#ifndef HAVE_cc0
/* Similar to SUBST, but NEWVAL is a LOG_LINKS expression.  */

static void
do_SUBST_LINK (struct insn_link **into, struct insn_link *newval)
{
  struct undo *buf;
  struct insn_link * oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_LINKS;
  buf->where.l = into;
  buf->old_contents.l = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_LINK(oldval, newval) do_SUBST_LINK (&oldval, newval)
#endif

/* Subroutine of try_combine.  Determine whether the replacement patterns
   NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to insn_rtx_cost
   than the original sequence I0, I1, I2, I3 and undobuf.other_insn.  Note
   that I0, I1 and/or NEWI2PAT may be NULL_RTX.  Similarly, NEWOTHERPAT and
   undobuf.other_insn may also both be NULL_RTX.  Return false if the cost
   of all the instructions can be estimated and the replacements are more
   expensive than the original sequence.  */

static bool
combine_validate_cost (rtx i0, rtx i1, rtx i2, rtx i3, rtx newpat,
                       rtx newi2pat, rtx newotherpat)
{
  int i0_cost, i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Look up the original insn_rtx_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      if (i0)
        {
          i0_cost = INSN_COST (i0);
          old_cost = (i0_cost > 0 && i1_cost > 0 && i2_cost > 0 && i3_cost > 0
                      ? i0_cost + i1_cost + i2_cost + i3_cost : 0);
        }
      else
        {
          old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0
                      ? i1_cost + i2_cost + i3_cost : 0);
          i0_cost = 0;
        }
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = i0_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat, optimize_this_for_speed_p);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat, optimize_this_for_speed_p);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      new_other_cost = insn_rtx_cost (newotherpat, optimize_this_for_speed_p);
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this combination if both new_cost and old_cost are greater than
     zero, and new_cost is greater than old_cost.  */
  if (old_cost > 0 && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i0)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d, %d and %d\n",
                       INSN_UID (i0), INSN_UID (i1), INSN_UID (i2),
                       INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d + %d = %d\n",
                       i0_cost, i1_cost, i2_cost, i3_cost, old_cost);
            }
          else if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    {
      INSN_COST (i1) = 0;
      if (i0)
        INSN_COST (i0) = 0;
    }

  return true;
}
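
/* A worked instance of the rule above (costs are illustrative only):
   with i2_cost == 4 and i3_cost == 4, old_cost == 8; a replacement with
   no newi2pat and new_i3_cost == 12 gives new_cost == 12 > 8, so it is
   rejected.  If any original cost is unknown (zero), old_cost ends up
   zero and the replacement is always accepted.  */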


/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
        {
          next = NEXT_INSN (insn);
          if (INSN_P (insn) && noop_move_p (insn))
            {
              if (dump_file)
                fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));

              delete_insn_and_edges (insn);
            }
        }
    }
}


/* Fill in log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx *next_use, insn;
  df_ref *def_vec, *use_vec;

  next_use = XCNEWVEC (rtx, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are carried over from what the original flow.c did.
     Don't ask me why it is done this way; I don't know and if it works,
     I don't want to know.  */

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

          /* Log links are created only once.  */
          gcc_assert (!LOG_LINKS (insn));

          for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
            {
              df_ref def = *def_vec;
              int regno = DF_REF_REGNO (def);
              rtx use_insn;

              if (!next_use[regno])
                continue;

              /* Do not consider if it is pre/post modification in MEM.  */
              if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
                continue;

              /* Do not make the log link for frame pointer.  */
              if ((regno == FRAME_POINTER_REGNUM
                   && (! reload_completed || frame_pointer_needed))
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
                  || (regno == HARD_FRAME_POINTER_REGNUM
                      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                  || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
                  )
                continue;

              use_insn = next_use[regno];
              if (BLOCK_FOR_INSN (use_insn) == bb)
                {
                  /* flow.c claimed:

                     We don't build a LOG_LINK for hard registers contained
                     in ASM_OPERANDs.  If these registers get replaced,
                     we might wind up changing the semantics of the insn,
                     even if reload can make what appear to be valid
                     assignments later.  */
                  if (regno >= FIRST_PSEUDO_REGISTER
                      || asm_noperands (PATTERN (use_insn)) < 0)
                    {
                      /* Don't add duplicate links between instructions.  */
                      struct insn_link *links;
                      FOR_EACH_LOG_LINK (links, use_insn)
                        if (insn == links->insn)
                          break;

                      if (!links)
                        LOG_LINKS (use_insn)
                          = alloc_insn_link (insn, LOG_LINKS (use_insn));
                    }
                }
              next_use[regno] = NULL_RTX;
            }

          for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
            {
              df_ref use = *use_vec;
              int regno = DF_REF_REGNO (use);

              /* Do not consider the usage of the stack pointer
                 by function call.  */
              if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
                continue;

              next_use[regno] = insn;
            }
        }
    }

  free (next_use);
}
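
/* Illustrative sketch (the pseudo register numbers are hypothetical):
   for the block

       insn 10:  (set (reg 100) (plus (reg 99) (const_int 1)))
       insn 11:  (set (reg 101) (mult (reg 100) (reg 98)))

   the backward scan first records insn 11 as the pending use of
   (reg 100); on reaching the def in insn 10 it adds a LOG_LINK from
   insn 11 back to insn 10.  */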

/* Walk the LOG_LINKS of insn B to see if we find a reference to A.  Return
   true if we found a LOG_LINK that proves that A feeds B.  This only works
   if there are no instructions between A and B which could have a link
   depending on A, since in that case we would not record a link for B.
   We also check the implicit dependency created by a cc0 setter/user
   pair.  */

static bool
insn_a_feeds_b (rtx a, rtx b)
{
  struct insn_link *links;
  FOR_EACH_LOG_LINK (links, b)
    if (links->insn == a)
      return true;
#ifdef HAVE_cc0
  if (sets_cc0_p (a))
    return true;
#endif
  return false;
}

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  struct insn_link *links, *nextlinks;
  rtx first;
  basic_block last_bb;

  int new_direct_jump_p = 0;

  for (first = f; first && !INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (struct insn_link *, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);
  gcc_obstack_init (&insn_link_obstack);

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;
  label_tick = label_tick_ebb_start = 1;

  /* Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  setup_incoming_promotions (first);
  /* Allow the entry block and the first block to fall into the same EBB.
     Conceptually the incoming promotions are assigned to the entry block.  */
  last_bb = ENTRY_BLOCK_PTR;

  create_log_links ();
  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      FOR_BB_INSNS (this_basic_block, insn)
        if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
          {
#ifdef AUTO_INC_DEC
            rtx links;
#endif

            subst_low_luid = DF_INSN_LUID (insn);
            subst_insn = insn;

            note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                         insn);
            record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
            for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
              if (REG_NOTE_KIND (links) == REG_INC)
                set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                  insn);
#endif

            /* Record the current insn_rtx_cost of this instruction.  */
            if (NONJUMP_INSN_P (insn))
              INSN_COST (insn) = insn_rtx_cost (PATTERN (insn),
                                                optimize_this_for_speed_p);
            if (dump_file)
              fprintf (dump_file, "insn_cost %d: %d\n",
                       INSN_UID (insn), INSN_COST (insn));
          }
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */
  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);
  last_bb = ENTRY_BLOCK_PTR;

  FOR_EACH_BB (this_basic_block)
    {
      rtx last_combined_insn = NULL_RTX;
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      rtl_profile_for_bb (this_basic_block);
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;
          if (NONDEBUG_INSN_P (insn))
            {
              while (last_combined_insn
                     && INSN_DELETED_P (last_combined_insn))
                last_combined_insn = PREV_INSN (last_combined_insn);
              if (last_combined_insn == NULL_RTX
                  || BARRIER_P (last_combined_insn)
                  || BLOCK_FOR_INSN (last_combined_insn) != this_basic_block
                  || DF_INSN_LUID (last_combined_insn) <= DF_INSN_LUID (insn))
                last_combined_insn = insn;

              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* See if we can find hardregs and subreg of pseudos in
                 narrower modes.  This could help turning TRUNCATEs
                 into SUBREGs.  */
              note_uses (&PATTERN (insn), record_truncated_values, NULL);

              /* Try this insn with each insn it links back to.  */

              FOR_EACH_LOG_LINK (links, insn)
                if ((next = try_combine (insn, links->insn, NULL_RTX,
                                         NULL_RTX, &new_direct_jump_p,
                                         last_combined_insn)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              FOR_EACH_LOG_LINK (links, insn)
                {
                  rtx link = links->insn;

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  FOR_EACH_LOG_LINK (nextlinks, link)
                    if ((next = try_combine (insn, link, nextlinks->insn,
                                             NULL_RTX, &new_direct_jump_p,
                                             last_combined_insn)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev, NULL_RTX, NULL_RTX,
                                           &new_direct_jump_p,
                                           last_combined_insn)) != 0)
                    goto retry;

                  FOR_EACH_LOG_LINK (nextlinks, prev)
                    if ((next = try_combine (insn, prev, nextlinks->insn,
                                             NULL_RTX, &new_direct_jump_p,
                                             last_combined_insn)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev, NULL_RTX, NULL_RTX,
                                           &new_direct_jump_p,
                                           last_combined_insn)) != 0)
                    goto retry;

                  FOR_EACH_LOG_LINK (nextlinks, prev)
                    if ((next = try_combine (insn, prev, nextlinks->insn,
                                             NULL_RTX, &new_direct_jump_p,
                                             last_combined_insn)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              FOR_EACH_LOG_LINK (links, insn)
                if (NONJUMP_INSN_P (links->insn)
                    && GET_CODE (PATTERN (links->insn)) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (links->insn)))
                    && (prev = prev_nonnote_insn (links->insn)) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, links->insn,
                                            prev, NULL_RTX, &new_direct_jump_p,
                                            last_combined_insn)) != 0)
                  goto retry;
#endif
1334
 
1335
              /* Try combining an insn with two different insns whose results it
1336
                 uses.  */
1337
              FOR_EACH_LOG_LINK (links, insn)
1338
                for (nextlinks = links->next; nextlinks;
1339
                     nextlinks = nextlinks->next)
1340
                  if ((next = try_combine (insn, links->insn,
1341
                                           nextlinks->insn, NULL_RTX,
1342
                                           &new_direct_jump_p,
1343
                                           last_combined_insn)) != 0)
1344
                    goto retry;
1345
 
1346
              /* Try four-instruction combinations.  */
1347
              FOR_EACH_LOG_LINK (links, insn)
1348
                {
1349
                  struct insn_link *next1;
1350
                  rtx link = links->insn;
1351
 
1352
                  /* If the linked insn has been replaced by a note, then there
1353
                     is no point in pursuing this chain any further.  */
1354
                  if (NOTE_P (link))
1355
                    continue;
1356
 
1357
                  FOR_EACH_LOG_LINK (next1, link)
1358
                    {
1359
                      rtx link1 = next1->insn;
1360
                      if (NOTE_P (link1))
1361
                        continue;
1362
                      /* I0 -> I1 -> I2 -> I3.  */
1363
                      FOR_EACH_LOG_LINK (nextlinks, link1)
1364
                        if ((next = try_combine (insn, link, link1,
1365
                                                 nextlinks->insn,
1366
                                                 &new_direct_jump_p,
1367
                                                 last_combined_insn)) != 0)
1368
                          goto retry;
1369
                      /* I0, I1 -> I2, I2 -> I3.  */
1370
                      for (nextlinks = next1->next; nextlinks;
1371
                           nextlinks = nextlinks->next)
1372
                        if ((next = try_combine (insn, link, link1,
1373
                                                 nextlinks->insn,
1374
                                                 &new_direct_jump_p,
1375
                                                 last_combined_insn)) != 0)
1376
                          goto retry;
1377
                    }
1378
 
1379
                  for (next1 = links->next; next1; next1 = next1->next)
1380
                    {
1381
                      rtx link1 = next1->insn;
1382
                      if (NOTE_P (link1))
1383
                        continue;
1384
                      /* I0 -> I2; I1, I2 -> I3.  */
1385
                      FOR_EACH_LOG_LINK (nextlinks, link)
1386
                        if ((next = try_combine (insn, link, link1,
1387
                                                 nextlinks->insn,
1388
                                                 &new_direct_jump_p,
1389
                                                 last_combined_insn)) != 0)
1390
                          goto retry;
1391
                      /* I0 -> I1; I1, I2 -> I3.  */
1392
                      FOR_EACH_LOG_LINK (nextlinks, link1)
1393
                        if ((next = try_combine (insn, link, link1,
1394
                                                 nextlinks->insn,
1395
                                                 &new_direct_jump_p,
1396
                                                 last_combined_insn)) != 0)
1397
                          goto retry;
1398
                    }
1399
                }
1400
 
1401
              /* Try this insn with each REG_EQUAL note it links back to.  */
1402
              FOR_EACH_LOG_LINK (links, insn)
1403
                {
1404
                  rtx set, note;
1405
                  rtx temp = links->insn;
1406
                  if ((set = single_set (temp)) != 0
1407
                      && (note = find_reg_equal_equiv_note (temp)) != 0
1408
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
1409
                      /* Avoid using a register that may already been marked
1410
                         dead by an earlier instruction.  */
1411
                      && ! unmentioned_reg_p (note, SET_SRC (set))
1412
                      && (GET_MODE (note) == VOIDmode
1413
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
1414
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
1415
                    {
1416
                      /* Temporarily replace the set's source with the
1417
                         contents of the REG_EQUAL note.  The insn will
1418
                         be deleted or recognized by try_combine.  */
1419
                      rtx orig = SET_SRC (set);
1420
                      SET_SRC (set) = note;
1421
                      i2mod = temp;
1422
                      i2mod_old_rhs = copy_rtx (orig);
1423
                      i2mod_new_rhs = copy_rtx (note);
1424
                      next = try_combine (insn, i2mod, NULL_RTX, NULL_RTX,
1425
                                          &new_direct_jump_p,
1426
                                          last_combined_insn);
1427
                      i2mod = NULL_RTX;
1428
                      if (next)
1429
                        goto retry;
1430
                      SET_SRC (set) = orig;
1431
                    }
1432
                }
1433
 
1434
              if (!NOTE_P (insn))
1435
                record_dead_and_set_regs (insn);
1436
 
1437
            retry:
1438
              ;
1439
            }
1440
        }
1441
    }

  default_rtl_profile ();
  clear_bb_flags ();
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  /* Clean up.  */
  obstack_free (&insn_link_obstack, NULL);
  free (uid_log_links);
  free (uid_insn_cost);
  VEC_free (reg_stat_type, heap, reg_stat);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  reg_stat_type *p;

  FOR_EACH_VEC_ELT (reg_stat_type, reg_stat, i, p)
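    /* This memset relies on reg_stat_type's layout: the last_* fields
       all live before sign_bit_copies, so a single wholesale clear wipes
       them without touching the fields that must survive the pass.  */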
    memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (rtx first)
{
  tree arg;
  bool strictly_local = false;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = DECL_CHAIN (arg))
    {
      rtx x, reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      enum machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
        continue;

      /* Determine, if possible, whether all call sites of the current
         function lie within the current compilation unit.  (This does
         take into account the exporting of a function via taking its
         address, and so forth.)  */
      strictly_local = cgraph_local_info (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
         (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
         TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
         after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
      mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
                                     TREE_TYPE (cfun->decl), 0);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when:
         (a) A mode promotion has occurred;  */
      if (mode1 == mode3)
        continue;
      /* (b) The mode of the register is the same as the mode of
             the argument as it is passed; */
      if (mode3 != mode4)
        continue;
      /* (c) There's no language level extension;  */
      if (mode1 == mode2)
        ;
      /* (c.1) All callers are from the current compilation unit.  If that's
         the case we don't have to rely on an ABI; we only have to know
         what we're generating right now, and we know that we will do the
         mode1 to mode2 promotion with the given sign.  */
      else if (!strictly_local)
        continue;
      /* (c.2) The combination of the two promotions is useful.  This is
         true when the signs match, or if the first promotion is unsigned.
         In the latter case, (sign_extend (zero_extend x)) is the same as
         (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
      else if (uns1)
        uns3 = true;
      else if (uns3)
        continue;

      /* Record that the value was promoted from mode1 to mode3,
         so that any sign extension at the head of the current
         function may be eliminated.  */
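      /* The value recorded below has the form (sign_extend:MODE3
         (clobber:MODE1 (const_int 0))) or its zero_extend counterpart;
         the CLOBBER stands for the unknown bits the argument had in
         MODE1.  */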
      x = gen_rtx_CLOBBER (mode1, const0_rtx);
      x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
      record_value_for_reg (reg, first, x);
    }
}

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */
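/* For example, if the only set of pseudo 77 is
   (set (reg:SI 77) (const_int 12)), we record that at most bits 2 and 3
   of it can be nonzero, which lets later simplifications discard
   redundant maskings of that register.  */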

static void
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
           (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
      && HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
    {
      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));

      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
          return;
        }

      /* If this register is being initialized using itself, and the
         register is uninitialized in this basic block, and there are
         no LOG_LINKS which set the register, then part of the
         register is uninitialized.  In that case we can't assume
         anything about the number of nonzero bits.

         ??? We could do better if we checked this in
         reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
         could avoid making assumptions about the insn which initially
         sets the register, while still using the information in other
         insns.  We would have to be careful to check every insn
         involved in the combination.  */

      if (insn
          && reg_referenced_p (x, PATTERN (insn))
          && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
                               REGNO (x)))
        {
          struct insn_link *link;

          FOR_EACH_LOG_LINK (link, insn)
            if (dead_or_set_p (link->insn, x))
              break;
          if (!link)
            {
              rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
              rsp->sign_bit_copies = 1;
              return;
            }
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (paradoxical_subreg_p (SET_DEST (set))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
              && CONST_INT_P (src)
              && INTVAL (src) > 0
              && val_signbit_known_set_p (GET_MODE (x), INTVAL (src)))
            src = GEN_INT (INTVAL (src) | ~GET_MODE_MASK (GET_MODE (x)));
#endif

          /* Don't call nonzero_bits if it cannot change anything.  */
          if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
            rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (rsp->sign_bit_copies == 0
              || rsp->sign_bit_copies > num)
            rsp->sign_bit_copies = num;
        }
      else
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
        }
    }
}

/* See if INSN can be combined into I3.  PRED, PRED2, SUCC and SUCC2 are
   optionally insns that were previously combined into I3 or that will be
   combined into the merger of INSN and I3.  The order is PRED, PRED2,
   INSN, SUCC, SUCC2, I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */
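/* In the simplest case, INSN is (set (reg:SI 100) (plus:SI (reg:SI 99)
   (const_int 4))) and I3 is its only user; when every check below passes,
   *PDEST becomes (reg:SI 100) and *PSRC the PLUS expression.  */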

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED,
               rtx pred2 ATTRIBUTE_UNUSED, rtx succ, rtx succ2,
               rtx *pdest, rtx *psrc)
{
  int i;
  const_rtx set = 0;
  rtx src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  bool all_adjacent = true;
  int (*is_volatile_p) (const_rtx);

  if (succ)
    {
      if (succ2)
        {
          if (next_active_insn (succ2) != i3)
            all_adjacent = false;
          if (next_active_insn (succ) != succ2)
            all_adjacent = false;
        }
      else if (next_active_insn (succ) != i3)
        all_adjacent = false;
      if (next_active_insn (insn) != succ)
        all_adjacent = false;
    }
  else if (next_active_insn (insn) != i3)
    all_adjacent = false;

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          switch (GET_CODE (elt))
            {
            /* This is important to combine floating point insns
               for the SH4 port.  */
            case USE:
              /* Combining an isolated USE doesn't make sense.
                 We depend here on combinable_i3pat to reject them.  */
              /* The code below this loop only verifies that the inputs of
                 the SET in INSN do not change.  We call reg_set_between_p
                 to verify that the REG in the USE does not change between
                 I3 and INSN.
                 If the USE in INSN was for a pseudo register, the matching
                 insn pattern will likely match any register; combining this
                 with any other USE would only be safe if we knew that the
                 used registers have identical values, or if there was
                 something to tell them apart, e.g. different modes.  For
                 now, we forgo such complicated tests and simply disallow
                 combining of USES of pseudo registers with any other USE.  */
              if (REG_P (XEXP (elt, 0))
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
                {
                  rtx i3pat = PATTERN (i3);
                  int i = XVECLEN (i3pat, 0) - 1;
                  unsigned int regno = REGNO (XEXP (elt, 0));

                  do
                    {
                      rtx i3elt = XVECEXP (i3pat, 0, i);

                      if (GET_CODE (i3elt) == USE
                          && REG_P (XEXP (i3elt, 0))
                          && (REGNO (XEXP (i3elt, 0)) == regno
                              ? reg_set_between_p (XEXP (elt, 0),
                                                   PREV_INSN (insn), i3)
                              : regno >= FIRST_PSEUDO_REGISTER))
                        return 0;
                    }
                  while (--i >= 0);
                }
              break;

              /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && insn_nothrow_p (insn)
                  && !side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
          && (find_reg_fusage (i3, USE, dest)
              || (REG_P (dest)
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
                  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      || (succ2 && FIND_REG_INC_NOTE (succ2, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (!all_adjacent
          && ((succ2
               && (reg_used_between_p (dest, succ2, i3)
                   || reg_used_between_p (dest, succ, succ2)))
              || (!succ2 && succ && reg_used_between_p (dest, succ, i3))))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; last_set of that register might point to
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
         equivalent to the memory so the substitution is valid even if there
         are intervening stores.  Also, don't move a volatile asm or
         UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
          && (((!MEM_P (src)
                || ! find_reg_note (insn, REG_EQUIV, src))
               && use_crosses_set_p (src, DF_INSN_LUID (insn)))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
              || GET_CODE (src) == UNSPEC_VOLATILE))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that as a special case.  */
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (REG_P (src)
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
              /* Don't extend the life of a hard register unless it is
                 user variable (if we have few registers) or it can't
                 fit into the desired register (meaning something special
                 is going on).
                 Also avoid substituting a return register into I3, because
                 reload can't handle a conflict with constraints of other
                 inputs.  */
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;


  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
        {
          /* Don't substitute for a register intended as a clobberable
             operand.  */
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
          if (rtx_equal_p (reg, dest))
            return 0;

          /* If the clobber represents an earlyclobber operand, we must not
             substitute an expression containing the clobbered register.
             As we do not analyze the constraint strings here, we have to
             make the conservative assumption.  However, if the register is
             a fixed hard reg, the clobber cannot represent any operand;
             we leave it up to the machine description to either accept or
             reject use-and-clobber patterns.  */
          if (!REG_P (reg)
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
              || !fixed_regs[REGNO (reg)])
            if (reg_overlap_mentioned_p (reg, src))
              return 0;
        }

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure neither succ nor succ2 contains a volatile reference.  */
      if (succ2 != 0 && volatile_refs_p (PATTERN (succ2)))
        return 0;
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;
      /* We'll check insns between INSN and I3 below.  */
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If INSN contains volatile references (specifically volatile MEMs),
     we cannot combine across any other volatile references.
     Even if INSN doesn't contain volatile references, any intervening
     volatile insn might affect machine state.  */

  is_volatile_p = volatile_refs_p (PATTERN (insn))
    ? volatile_refs_p
    : volatile_insn_p;

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && p != succ2 && is_volatile_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (JUMP_P (i3)
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || (pred != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
            || (pred2 != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred2)))
            || (succ != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
            || (succ2 != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ2)))
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST, I1DEST or I0DEST as
   doing so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
         (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
                    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.  The same situation
   can occur for I0, in which case I0_NOT_IN_SRC is set.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest, rtx i0dest,
                  int i1_not_in_src, int i0_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
         above.  We don't want to prevent pseudos from being combined
         into the address of a MEM, so only prevent the combination if
         i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
           (!MEM_P (inner_dest)
            || rtx_equal_p (i2dest, inner_dest)
            || (i1dest && rtx_equal_p (i1dest, inner_dest))
            || (i0dest && rtx_equal_p (i0dest, inner_dest)))
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))
               || (i0dest && reg_overlap_mentioned_p (i0dest, inner_dest))))

          /* This is the same test done in can_combine_p except we can't test
             all_adjacent; we don't have to, since this instruction will stay
             in place, thus we are not considering increasing the lifetime of
             INNER_DEST.

             Also, if this insn sets a function argument, combining it with
             something that might need a spill could clobber a previous
             function argument; the all_adjacent test in can_combine_p also
             checks this; here, we do a more specific test for this case.  */

          || (REG_P (inner_dest)
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))
          || (i0_not_in_src && reg_overlap_mentioned_p (i0dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn, so
         record that for later.  We have to consider paradoxical
         subregs here, since they kill the whole register, but we
         ignore partial subregs, STRICT_LOW_PART, etc.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (subdest))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
        subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
          && REG_P (subdest)
          && reg_referenced_p (subdest, PATTERN (i3))
          && REGNO (subdest) != FRAME_POINTER_REGNUM
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (subdest) != ARG_POINTER_REGNUM
              || ! fixed_regs[REGNO (subdest)])
#endif
          && REGNO (subdest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = subdest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, i0dest,
                                i1_not_in_src, i0_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}

/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */
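/* For example, (plus:SI (mult:SI (reg:SI 65) (reg:SI 66)) (reg:SI 67))
   yields 1, while (mult:SI (reg:SI 65) (const_int 8)) yields 0 because
   the multiplication is by a power of two.  */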

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (CONST_INT_P (XEXP (x, 1))
                && exact_log2 (UINTVAL (XEXP (x, 1))) >= 0);
    default:
      if (BINARY_P (x))
        return contains_muldiv (XEXP (x, 0))
            || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
        return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}

/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((HARD_REGISTER_P (src)
           && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (src))
           && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (src))))
          || (HARD_REGISTER_P (dest)
              && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (dest))
              && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}

struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
{
  struct likely_spilled_retval_info *const info =
    (struct likely_spilled_retval_info *) data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
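  /* Build a mask of the NREGS low bits (e.g. nregs == 3 gives 0x7) and
     line it up with the bit positions info->mask uses for the registers
     starting at info->regno.  */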
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}

/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */
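/* For instance, a return value spread over two hard registers may be
   copied to pseudos by two separate moves; while combine looks at the
   second move, the register holding the other part is still live, and if
   its class is likely to be spilled, combining here could leave reload
   with an unsatisfiable allocation.  */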

static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which TARGET_FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !targetm.calls.function_value_regno_p (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs--;
  do
    {
      if ((mask & 1 << nregs)
          && targetm.class_likely_spilled_p (REGNO_REG_CLASS (regno + nregs)))
        return 1;
    } while (nregs--);
  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  /* For notes, be conservative and simply remove them.  */
  remove_reg_equal_equiv_notes (insn);

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (alloc_insn_link (insn, NULL));

  df_insn_rescan (insn);
}

/* Return TRUE if combine can reuse reg X in mode MODE.
   ADDED_SETS is nonzero if the original set is still required.  */
static bool
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
{
  unsigned int regno;

  if (!REG_P (x))
    return false;

  regno = REGNO (x);
  /* Allow hard registers if the new mode is legal and occupies no more
     registers than the old mode.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    return (HARD_REGNO_MODE_OK (regno, mode)
            && (hard_regno_nregs[regno][GET_MODE (x)]
                >= hard_regno_nregs[regno][mode]));

  /* Or a pseudo that is only used once.  */
  return (REG_N_SETS (regno) == 1 && !added_sets
          && !REG_USERVAR_P (x));
}


/* Check whether X, the destination of a set, refers to part of
   the register specified by REG.  */
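/* For example, (subreg:HI (reg:SI 99) 0) and
   (strict_low_part (subreg:HI (reg:SI 99) 0)) both refer to part of
   pseudo 99, whereas (reg:SI 99) itself does not match.  */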

static bool
reg_subword_p (rtx x, rtx reg)
{
  /* Check that reg is an integer mode register.  */
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT)
    x = XEXP (x, 0);

  return GET_CODE (x) == SUBREG
         && SUBREG_REG (x) == reg
         && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}

#ifdef AUTO_INC_DEC
/* Replace auto-increment addressing modes with explicit operations to access
   the same addresses without modifying the corresponding registers.  */
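/* For instance, inside a SImode MEM, (pre_dec (reg 100)) becomes
   (plus (reg 100) (const_int -4)) and (post_inc (reg 100)) becomes just
   (reg 100), since a POST_INC uses the unmodified register as the
   address.  */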

static rtx
cleanup_auto_inc_dec (rtx src, enum machine_mode mem_mode)
{
  rtx x = src;
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because each instance represents a
         distinct value.  */
      return x;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return x;
      break;

    case CONST:
      if (shared_const_p (x))
        return x;
      break;

    case MEM:
      mem_mode = GET_MODE (x);
      break;

    case PRE_INC:
    case PRE_DEC:
      gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode);
      return gen_rtx_PLUS (GET_MODE (x),
                           cleanup_auto_inc_dec (XEXP (x, 0), mem_mode),
                           GEN_INT (code == PRE_INC
                                    ? GET_MODE_SIZE (mem_mode)
                                    : -GET_MODE_SIZE (mem_mode)));

    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return cleanup_auto_inc_dec (code == PRE_MODIFY
                                   ? XEXP (x, 1) : XEXP (x, 0),
                                   mem_mode);

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  x = shallow_copy_rtx (x);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (x, used) = 0;

  /* We do not copy FRAME_RELATED for INSNs.  */
  if (INSN_P (x))
    RTX_FLAG (x, frame_related) = 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), mem_mode);
    else if (fmt[i] == 'E' || fmt[i] == 'V')
      {
        int j;
        XVEC (x, i) = rtvec_alloc (XVECLEN (x, i));
        for (j = 0; j < XVECLEN (x, i); j++)
          XVECEXP (x, i, j)
            = cleanup_auto_inc_dec (XVECEXP (src, i, j), mem_mode);
      }

  return x;
}
#endif

/* Auxiliary data structure for propagate_for_debug.  */

struct rtx_subst_pair
{
  rtx to;
  bool adjusted;
};

/* DATA points to an rtx_subst_pair.  Return the value that should be
   substituted.  */

static rtx
propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
{
  struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;

  if (!rtx_equal_p (from, old_rtx))
    return NULL_RTX;
  if (!pair->adjusted)
    {
      pair->adjusted = true;
#ifdef AUTO_INC_DEC
      pair->to = cleanup_auto_inc_dec (pair->to, VOIDmode);
#else
      pair->to = copy_rtx (pair->to);
#endif
      pair->to = make_compound_operation (pair->to, SET);
      return pair->to;
    }
  return copy_rtx (pair->to);
}

/* Replace all the occurrences of DEST with SRC in DEBUG_INSNs between INSN
   and LAST, not including INSN, but including LAST.  Also stop at the end
   of THIS_BASIC_BLOCK.  */

static void
propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src)
{
  rtx next, loc, end = NEXT_INSN (BB_END (this_basic_block));

  struct rtx_subst_pair p;
  p.to = src;
  p.adjusted = false;

  next = NEXT_INSN (insn);
  last = NEXT_INSN (last);
  while (next != last && next != end)
    {
      insn = next;
      next = NEXT_INSN (insn);
      if (DEBUG_INSN_P (insn))
        {
          loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
                                         dest, propagate_for_debug_subst, &p);
          if (loc == INSN_VAR_LOCATION_LOC (insn))
            continue;
          INSN_VAR_LOCATION_LOC (insn) = loc;
          df_insn_rescan (insn);
        }
    }
}

/* Delete the unconditional jump INSN and adjust the CFG correspondingly.
   Note that the INSN should be deleted *after* removing dead edges, so
   that the kept edge is the fallthrough edge for a (set (pc) (pc))
   but not for a (set (pc) (label_ref FOO)).  */

static void
update_cfg_for_uncondjump (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  gcc_assert (BB_END (bb) == insn);

  purge_dead_edges (bb);

  delete_insn (insn);
  if (EDGE_COUNT (bb->succs) == 1)
    {
      rtx insn;

      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;

      /* Remove barriers from the footer if there are any.  */
      for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
        if (BARRIER_P (insn))
          {
            if (PREV_INSN (insn))
              NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
            else
              bb->il.rtl->footer = NEXT_INSN (insn);
            if (NEXT_INSN (insn))
              PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
          }
        else if (LABEL_P (insn))
          break;
    }
}

/* Try to combine the insns I0, I1 and I2 into I3.
   Here I0, I1 and I2 appear earlier than I3.
   I0 and I1 can be zero; then we combine just I2 into I3, or I1 and I2 into
   I3.

   If we are combining more than two insns and the resulting insn is not
   recognized, try splitting it into two insns.  If that happens, I2 and I3
   are retained and I1/I0 are pseudo-deleted by turning them into a NOTE.
   Otherwise, I0, I1 and I2 are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.

   LAST_COMBINED_INSN is either I3, or some insn after I3 that was the
   I3 passed to an earlier try_combine within the same basic block.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
             rtx last_combined_insn)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0, substed_i0 = 0;
  /* Indicates need to preserve SET in I0, I1 or I2 in I3 if it is not
     dead.  */
  int added_sets_0, added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's or I1's body now appears in I3.  */
  int i2_is_used = 0, i1_is_used = 0;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2, I1 and I0.  */
  rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0, i0dest = 0, i0src = 0;
  /* Copy of SET_SRC of I1 and I0, if needed.  */
  rtx i1src_copy = 0, i0src_copy = 0, i0src_copy2 = 0;
  /* Set if I2DEST was reused as a scratch register.  */
  bool i2scratch = false;
  /* The PATTERNs of I0, I1, and I2, or a copy of them in certain cases.  */
  rtx i0pat = 0, i1pat = 0, i2pat = 0;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i0dest_in_i0src = 0, i1dest_in_i0src = 0, i2dest_in_i0src = 0;
  int i2dest_killed = 0, i1dest_killed = 0, i0dest_killed = 0;
  int i1_feeds_i2_n = 0, i0_feeds_i2_n = 0, i0_feeds_i1_n = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Set if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Set if I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;
  int changed_i3_dest = 0;

  int maxreg;
  rtx temp;
  struct insn_link *link;
  rtx other_pat = 0;
  rtx new_other_notes;
  int i;

  /* Only try four-insn combinations when there's high likelihood of
     success.  Look for simple insns, such as loads of constants or
     binary operations involving a constant.  */
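  /* The scoring below counts a constant source as two "good" insns, a
     binary operation with a constant operand as one, and tallies shifts
     separately; the four-insn attempt proceeds only with at least two
     good insns or at least two shifts.  */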
  if (i0)
    {
      int i;
      int ngood = 0;
      int nshift = 0;

      if (!flag_expensive_optimizations)
        return 0;

      for (i = 0; i < 4; i++)
        {
          rtx insn = i == 0 ? i0 : i == 1 ? i1 : i == 2 ? i2 : i3;
          rtx set = single_set (insn);
          rtx src;
          if (!set)
            continue;
          src = SET_SRC (set);
          if (CONSTANT_P (src))
            {
              ngood += 2;
              break;
            }
          else if (BINARY_P (src) && CONSTANT_P (XEXP (src, 1)))
            ngood++;
          else if (GET_CODE (src) == ASHIFT || GET_CODE (src) == ASHIFTRT
                   || GET_CODE (src) == LSHIFTRT)
            nshift++;
        }
      if (ngood < 2 && nshift < 2)
        return 0;
    }

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || (i0 && cant_combine_insn_p (i0))
      || likely_spilled_retval_p (i3))
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (i0)
        fprintf (dump_file, "\nTrying %d, %d, %d -> %d:\n",
                 INSN_UID (i0), INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
      else if (i1)
        fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
                 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
      else
        fprintf (dump_file, "\nTrying %d -> %d:\n",
                 INSN_UID (i2), INSN_UID (i3));
    }

  /* If multiple insns feed into one of I2 or I3, they can be in any
     order.  To simplify the code below, reorder them in sequence.  */
  if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i2))
    temp = i2, i2 = i0, i0 = temp;
  if (i0 && DF_INSN_LUID (i0) > DF_INSN_LUID (i1))
    temp = i1, i1 = i0, i0 = temp;
  if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also cases
     where I2 has a number of CLOBBERs inside the PARALLEL.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_active_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref
         and another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if (GET_CODE (XVECEXP (p2, 0, i)) == SET
              && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_luid = DF_INSN_LUID (i2);

              added_sets_2 = added_sets_1 = added_sets_0 = 0;
              i2src = SET_SRC (XVECEXP (p2, 0, i));
              i2dest = SET_DEST (XVECEXP (p2, 0, i));
              i2dest_killed = dead_or_set_p (i2, i2dest);

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we validate
                 the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));
              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }

  /* If I2 is setting a pseudo to a constant and I3 is setting some
     sub-part of it to another constant, merge them by making a new
     constant.  */
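  /* For example, on a little-endian target, combining
     (set (reg:DI 100) (const_int 0)) with
     (set (subreg:SI (reg:DI 100) 0) (const_int 5)) can yield the single
     insn (set (reg:DI 100) (const_int 5)).  */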
2774
  if (i1 == 0
2775
      && (temp = single_set (i2)) != 0
2776
      && (CONST_INT_P (SET_SRC (temp))
2777
          || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
2778
      && GET_CODE (PATTERN (i3)) == SET
2779
      && (CONST_INT_P (SET_SRC (PATTERN (i3)))
2780
          || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
2781
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
2782
    {
2783
      rtx dest = SET_DEST (PATTERN (i3));
2784
      int offset = -1;
2785
      int width = 0;
2786
 
2787
      if (GET_CODE (dest) == ZERO_EXTRACT)
2788
        {
2789
          if (CONST_INT_P (XEXP (dest, 1))
2790
              && CONST_INT_P (XEXP (dest, 2)))
2791
            {
2792
              width = INTVAL (XEXP (dest, 1));
2793
              offset = INTVAL (XEXP (dest, 2));
2794
              dest = XEXP (dest, 0);
2795
              if (BITS_BIG_ENDIAN)
2796
                offset = GET_MODE_PRECISION (GET_MODE (dest)) - width - offset;
2797
            }
2798
        }
2799
      else
2800
        {
2801
          if (GET_CODE (dest) == STRICT_LOW_PART)
2802
            dest = XEXP (dest, 0);
2803
          width = GET_MODE_PRECISION (GET_MODE (dest));
2804
          offset = 0;
2805
        }
2806
 
2807
      if (offset >= 0)
2808
        {
2809
          /* If this is the low part, we're done.  */
2810
          if (subreg_lowpart_p (dest))
2811
            ;
2812
          /* Handle the case where inner is twice the size of outer.  */
2813
          else if (GET_MODE_PRECISION (GET_MODE (SET_DEST (temp)))
2814
                   == 2 * GET_MODE_PRECISION (GET_MODE (dest)))
2815
            offset += GET_MODE_PRECISION (GET_MODE (dest));
2816
          /* Otherwise give up for now.  */
2817
          else
2818
            offset = -1;
2819
        }
2820
 
2821
      if (offset >= 0
2822
          && (GET_MODE_PRECISION (GET_MODE (SET_DEST (temp)))
2823
              <= HOST_BITS_PER_DOUBLE_INT))
2824
        {
2825
          double_int m, o, i;
2826
          rtx inner = SET_SRC (PATTERN (i3));
2827
          rtx outer = SET_SRC (temp);
2828
 
2829
          o = rtx_to_double_int (outer);
2830
          i = rtx_to_double_int (inner);
2831
 
2832
          m = double_int_mask (width);
2833
          i = double_int_and (i, m);
2834
          m = double_int_lshift (m, offset, HOST_BITS_PER_DOUBLE_INT, false);
2835
          i = double_int_lshift (i, offset, HOST_BITS_PER_DOUBLE_INT, false);
2836
          o = double_int_ior (double_int_and_not (o, m), i);
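
          /* For instance (a hypothetical SImode case): splicing the
             8-bit constant 0xab in at offset 8 of the outer constant
             0x12345678 gives m = 0xff and i = 0xab; after the shifts
             m = 0xff00 and i = 0xab00; finally
             o = (0x12345678 & ~0xff00) | 0xab00 = 0x1234ab78.  */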

          combine_merges++;
          subst_insn = i3;
          subst_low_luid = DF_INSN_LUID (i2);
          added_sets_2 = added_sets_1 = added_sets_0 = 0;
          i2dest = SET_DEST (temp);
          i2dest_killed = dead_or_set_p (i2, i2dest);

          /* Replace the source in I2 with the new constant and make the
             resulting insn the new pattern for I3.  Then skip to where we
             validate the pattern.  Everything was set up above.  */
          SUBST (SET_SRC (temp),
                 immed_double_int_const (o, GET_MODE (SET_DEST (temp))));

          newpat = PATTERN (i2);

          /* The dest of I3 has been replaced with the dest of I2.  */
          changed_i3_dest = 1;
          goto validate_replacement;
        }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same DF_INSN_LUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                             BLOCK_FOR_INSN (i2), XVECEXP (PATTERN (i2), 0, 1),
                             INSN_LOCATOR (i2), -1, NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
          SUBST_LINK (LOG_LINKS (i2), alloc_insn_link (i1, LOG_LINKS (i2)));
        }
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i0, i1, NULL_RTX, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, i0, NULL_RTX, i2, NULL_RTX,
                                 &i1dest, &i1src))
      || (i0 && ! can_combine_p (i0, i3, NULL_RTX, NULL_RTX, i1, i2,
                                 &i0dest, &i0src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i0dest_in_i0src = i0 && reg_overlap_mentioned_p (i0dest, i0src);
  i1dest_in_i0src = i0 && reg_overlap_mentioned_p (i1dest, i0src);
  i2dest_in_i0src = i0 && reg_overlap_mentioned_p (i2dest, i0src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
  i0dest_killed = i0 && dead_or_set_p (i0, i0dest);

  /* For the earlier insns, determine which of the subsequent ones they
     feed.  */
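  /* I1_FEEDS_I2_N and I0_FEEDS_I1_N come from the LOG_LINKS.  I0 can
     also feed I2 directly: trivially so when it does not feed I1, and
     otherwise only when I1 does not overwrite I0's destination and
     that destination still appears in I2's source.  */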
  i1_feeds_i2_n = i1 && insn_a_feeds_b (i1, i2);
  i0_feeds_i1_n = i0 && insn_a_feeds_b (i0, i1);
  i0_feeds_i2_n = (i0 && (!i0_feeds_i1_n ? insn_a_feeds_b (i0, i2)
                          : (!reg_overlap_mentioned_p (i1dest, i0dest)
                             && reg_overlap_mentioned_p (i0dest, i2src))));

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, i0dest,
                          i1 && i2dest_in_i1src && !i1_feeds_i2_n,
                          i0 && ((i2dest_in_i0src && !i0_feeds_i2_n)
                                 || (i1dest_in_i0src && !i0_feeds_i1_n)),
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we
     will reject a combination that is one, since it must be slower.
     Be conservative here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (i0 != 0 && GET_CODE (i0src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && REG_P (SET_SRC (PATTERN (i3)))
        && MEM_P (SET_DEST (PATTERN (i3)))
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    {
      rtx link;
      for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == REG_INC
            && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
                || (i1 != 0
                    && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
          {
            undo_all ();
            return 0;
          }
    }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  The same consideration applies to I0.  */

  added_sets_2 = !dead_or_set_p (i3, i2dest);

  if (i1)
    added_sets_1 = !(dead_or_set_p (i3, i1dest)
                     || (i1_feeds_i2_n && dead_or_set_p (i2, i1dest)));
  else
    added_sets_1 = 0;

  if (i0)
    added_sets_0 = !(dead_or_set_p (i3, i0dest)
                      || (i0_feeds_i2_n && dead_or_set_p (i2, i0dest))
                      || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest)));
  else
    added_sets_0 = 0;

  /* We are about to copy insns for the case where they need to be kept
     around.  Check that they can be copied in the merged instruction.  */

  if (targetm.cannot_copy_insn_p
      && ((added_sets_2 && targetm.cannot_copy_insn_p (i2))
          || (i1 && added_sets_1 && targetm.cannot_copy_insn_p (i1))
          || (i0 && added_sets_0 && targetm.cannot_copy_insn_p (i0))))
    {
      undo_all ();
      return 0;
    }

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
        i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
      else
        i2pat = copy_rtx (PATTERN (i2));
    }

  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
        i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
      else
        i1pat = copy_rtx (PATTERN (i1));
    }

  if (added_sets_0)
    {
      if (GET_CODE (PATTERN (i0)) == PARALLEL)
        i0pat = gen_rtx_SET (VOIDmode, i0dest, copy_rtx (i0src));
      else
        i0pat = copy_rtx (PATTERN (i0));
    }

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
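
  /* Concretely, such a pattern looks like
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     where OP is the arithmetic operation and Y gets its result.  */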

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && CONST_INT_P (XEXP (SET_SRC (PATTERN (i3)), 1))
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx newpat_dest;
      rtx *cc_use_loc = NULL, cc_use_insn = NULL_RTX;
      rtx op0 = i2src, op1 = XEXP (SET_SRC (PATTERN (i3)), 1);
      enum machine_mode compare_mode, orig_compare_mode;
      enum rtx_code compare_code = UNKNOWN, orig_compare_code = UNKNOWN;

      newpat = PATTERN (i3);
      newpat_dest = SET_DEST (newpat);
      compare_mode = orig_compare_mode = GET_MODE (newpat_dest);

      if (undobuf.other_insn == 0
          && (cc_use_loc = find_single_use (SET_DEST (newpat), i3,
                                            &cc_use_insn)))
        {
          compare_code = orig_compare_code = GET_CODE (*cc_use_loc);
          compare_code = simplify_compare_const (compare_code,
                                                 op0, &op1);
#ifdef CANONICALIZE_COMPARISON
          CANONICALIZE_COMPARISON (compare_code, op0, op1);
#endif
        }

      /* Do the rest only if op1 is const0_rtx, which may be the
         result of simplification.  */
      if (op1 == const0_rtx)
        {
          /* If a single use of the CC is found, prepare to modify it
             when SELECT_CC_MODE returns a new CC-class mode, or when
             the above simplify_compare_const() returned a new comparison
             operator.  undobuf.other_insn is assigned the CC use insn
             when modifying it.  */
          if (cc_use_loc)
            {
#ifdef SELECT_CC_MODE
              enum machine_mode new_mode
                = SELECT_CC_MODE (compare_code, op0, op1);
              if (new_mode != orig_compare_mode
                  && can_change_dest_mode (SET_DEST (newpat),
                                           added_sets_2, new_mode))
                {
                  unsigned int regno = REGNO (newpat_dest);
                  compare_mode = new_mode;
                  if (regno < FIRST_PSEUDO_REGISTER)
                    newpat_dest = gen_rtx_REG (compare_mode, regno);
                  else
                    {
                      SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                      newpat_dest = regno_reg_rtx[regno];
                    }
                }
#endif
              /* Cases for modifying the CC-using comparison.  */
              if (compare_code != orig_compare_code
                  /* ??? Do we need to verify the zero rtx?  */
                  && XEXP (*cc_use_loc, 1) == const0_rtx)
                {
                  /* Replace cc_use_loc with entire new RTX.  */
                  SUBST (*cc_use_loc,
                         gen_rtx_fmt_ee (compare_code, compare_mode,
                                         newpat_dest, const0_rtx));
                  undobuf.other_insn = cc_use_insn;
                }
              else if (compare_mode != orig_compare_mode)
                {
                  /* Just replace the CC reg with a new mode.  */
                  SUBST (XEXP (*cc_use_loc, 0), newpat_dest);
                  undobuf.other_insn = cc_use_insn;
                }
            }

          /* Now we modify the current newpat:
             First, SET_DEST(newpat) is updated if the CC mode has been
             altered.  For targets without SELECT_CC_MODE, this should be
             optimized away.  */
          if (compare_mode != orig_compare_mode)
            SUBST (SET_DEST (newpat), newpat_dest);
          /* This is always done to propagate i2src into newpat.  */
          SUBST (SET_SRC (newpat),
                 gen_rtx_COMPARE (compare_mode, op0, op1));
          /* Create new version of i2pat if needed; the below PARALLEL
             creation needs this to work correctly.  */
          if (! rtx_equal_p (i2src, op0))
            i2pat = gen_rtx_SET (VOIDmode, i2dest, op0);
          i2_is_used = 1;
        }
    }
#endif

  if (i2_is_used == 0)
    {
      /* It is possible that the source of I2 or I1 may be performing
         an unneeded operation, such as a ZERO_EXTEND of something
         that is known to have the high part zero.  Handle that case
         by letting subst look at the inner insns.

         Another way to do this would be to have a function that tries
         to simplify a single insn instead of merging two or more
         insns.  We don't do this because of the potential of infinite
         loops and because of the potential extra memory required.
         However, doing it the way we are is a bit of a kludge and
         doesn't catch all cases.

         But only do this if -fexpensive-optimizations since it slows
         things down and doesn't usually win.

         This is not done in the COMPARE case above because the
         unmodified I2PAT is used in the PARALLEL and so a pattern
         with a modified I2SRC would not match.  */

      if (flag_expensive_optimizations)
        {
          /* Pass pc_rtx so no substitutions are done, just
             simplifications.  */
          if (i1)
            {
              subst_low_luid = DF_INSN_LUID (i1);
              i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0, 0);
            }

          subst_low_luid = DF_INSN_LUID (i2);
          i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0, 0);
        }

      n_occurrences = 0;         /* `subst' counts here */
      subst_low_luid = DF_INSN_LUID (i2);

      /* If I1 feeds into I2 and I1DEST is in I1SRC, we need to make a unique
         copy of I2SRC each time we substitute it, in order to avoid creating
         self-referential RTL when we will be substituting I1SRC for I1DEST
         later.  Likewise if I0 feeds into I2, either directly or indirectly
         through I1, and I0DEST is in I0SRC.  */
      newpat = subst (PATTERN (i3), i2dest, i2src, 0, 0,
                      (i1_feeds_i2_n && i1dest_in_i1src)
                      || ((i0_feeds_i2_n || (i0_feeds_i1_n && i1_feeds_i2_n))
                          && i0dest_in_i0src));
      substed_i2 = 1;

      /* Record whether I2's body now appears within I3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise, try to
     substitute I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Check that an autoincrement side-effect on I1 has not been lost.
         This happens if I1DEST is mentioned in I2 and dies there, and
         has disappeared from the new pattern.  */
      if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
           && i1_feeds_i2_n
           && dead_or_set_p (i2, i1dest)
           && !reg_overlap_mentioned_p (i1dest, newpat))
           /* Before we can do this substitution, we must redo the test done
              above (see detailed comments there) that ensures I1DEST isn't
              mentioned in any SETs in NEWPAT that are field assignments.  */
          || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, NULL_RTX,
                                0, 0, 0))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i1);

      /* If the following substitution will modify I1SRC, make a copy of it
         for the case where it is substituted for I1DEST in I2PAT later.  */
      if (added_sets_2 && i1_feeds_i2_n)
        i1src_copy = copy_rtx (i1src);

      /* If I0 feeds into I1 and I0DEST is in I0SRC, we need to make a unique
         copy of I1SRC each time we substitute it, in order to avoid creating
         self-referential RTL when we will be substituting I0SRC for I0DEST
         later.  */
      newpat = subst (newpat, i1dest, i1src, 0, 0,
                      i0_feeds_i1_n && i0dest_in_i0src);
      substed_i1 = 1;

      /* Record whether I1's body now appears within I3's body.  */
      i1_is_used = n_occurrences;
    }

  /* Likewise for I0 if we have it.  */

  if (i0 && GET_CODE (newpat) != CLOBBER)
    {
      if ((FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
           && ((i0_feeds_i2_n && dead_or_set_p (i2, i0dest))
               || (i0_feeds_i1_n && dead_or_set_p (i1, i0dest)))
           && !reg_overlap_mentioned_p (i0dest, newpat))
          || !combinable_i3pat (NULL_RTX, &newpat, i0dest, NULL_RTX, NULL_RTX,
                                0, 0, 0))
        {
          undo_all ();
          return 0;
        }

      /* If the following substitution will modify I0SRC, make a copy of it
         for the case where it is substituted for I0DEST in I1PAT later.  */
      if (added_sets_1 && i0_feeds_i1_n)
        i0src_copy = copy_rtx (i0src);
      /* And a copy for I0DEST in I2PAT substitution.  */
      if (added_sets_2 && ((i0_feeds_i1_n && i1_feeds_i2_n)
                           || (i0_feeds_i2_n)))
        i0src_copy2 = copy_rtx (i0src);

      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i0);
      newpat = subst (newpat, i0dest, i0src, 0, 0, 0);
      substed_i0 = 1;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
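  /* Each substituted occurrence of I2SRC in the result counts once
     (I2_IS_USED), and keeping I2's original SET (ADDED_SETS_2) counts
     once more; any total above one would duplicate I2's REG_INC
     side-effect.  The I1 and I0 terms count the analogous ways that
     I1SRC and I0SRC can survive into the result.  */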
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (i1_is_used + added_sets_1 + (added_sets_2 && i1_feeds_i2_n)
              > 1))
      || (i0 != 0 && FIND_REG_INC_NOTE (i0, NULL_RTX) != 0
          && (n_occurrences + added_sets_0
              + (added_sets_1 && i0_feeds_i1_n)
              + (added_sets_2 && i0_feeds_i2_n)
              > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_0 || added_sets_1 || added_sets_2)
    {
      int extra_sets = added_sets_0 + added_sets_1 + added_sets_2;
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + extra_sets;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
                  sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + extra_sets;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_0)
        XVECEXP (newpat, 0, --total_sets) = i0pat;

      if (added_sets_1)
        {
          rtx t = i1pat;
          if (i0_feeds_i1_n)
            t = subst (t, i0dest, i0src_copy ? i0src_copy : i0src, 0, 0, 0);

          XVECEXP (newpat, 0, --total_sets) = t;
        }
      if (added_sets_2)
        {
          rtx t = i2pat;
          if (i1_feeds_i2_n)
            t = subst (t, i1dest, i1src_copy ? i1src_copy : i1src, 0, 0,
                       i0_feeds_i1_n && i0dest_in_i0src);
          if ((i0_feeds_i1_n && i1_feeds_i2_n) || i0_feeds_i2_n)
            t = subst (t, i0dest, i0src_copy2 ? i0src_copy2 : i0src, 0, 0, 0);

          XVECEXP (newpat, 0, --total_sets) = t;
        }
    }

 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);

      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
        RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and isn't
     marked as an instruction that might trap in an EH region.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.

     It's pointless doing this if we originally had two sets, one from
     i3, and one from i2.  Combining then splitting the parallel results
     in the original i2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.

     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */

  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);

      if (((REG_P (SET_DEST (set1))
            && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
           || (GET_CODE (SET_DEST (set1)) == SUBREG
               && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
          && insn_nothrow_p (i3)
          && !side_effects_p (SET_SRC (set1)))
        {
          newpat = set0;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }

      else if (((REG_P (SET_DEST (set0))
                 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
                || (GET_CODE (SET_DEST (set0)) == SUBREG
                    && find_reg_note (i3, REG_UNUSED,
                                      SUBREG_REG (SET_DEST (set0)))))
               && insn_nothrow_p (i3)
               && !side_effects_p (SET_SRC (set0)))
        {
          newpat = set1;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

          if (insn_code_number >= 0)
            changed_i3_dest = 1;
        }
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx parallel, m_split, *split;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = combine_split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
        {
          enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));

          /* First try to split using the original register as a
             scratch register.  */
          parallel = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (2, newpat,
                                                  gen_rtx_CLOBBER (VOIDmode,
                                                                   i2dest)));
          m_split = combine_split_insns (parallel, i3);

          /* If that didn't work, try changing the mode of I2DEST if
             we can.  */
          if (m_split == 0
              && new_mode != GET_MODE (i2dest)
              && new_mode != VOIDmode
              && can_change_dest_mode (i2dest, added_sets_2, new_mode))
            {
              enum machine_mode old_mode = GET_MODE (i2dest);
              rtx ni2dest;

              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
                  ni2dest = regno_reg_rtx[REGNO (i2dest)];
                }

              parallel = (gen_rtx_PARALLEL
                          (VOIDmode,
                           gen_rtvec (2, newpat,
                                      gen_rtx_CLOBBER (VOIDmode,
                                                       ni2dest))));
              m_split = combine_split_insns (parallel, i3);

              if (m_split == 0
                  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                {
                  struct undo *buf;

                  adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
                  buf = undobuf.undos;
                  undobuf.undos = buf->next;
                  buf->next = undobuf.frees;
                  undobuf.frees = buf;
                }
            }

          i2scratch = m_split != 0;
        }

      /* If recog_for_combine has discarded clobbers, try to use them
         again for the split.  */
      if (m_split == 0 && newpat_vec_with_clobbers)
        {
          parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
          m_split = combine_split_insns (parallel, i3);
        }

      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
        {
          m_split = PATTERN (m_split);
          insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = m_split;
        }
      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
               && (next_nonnote_nondebug_insn (i2) == i3
                   || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (NEXT_INSN (m_split));
          newi2pat = PATTERN (m_split);

          i3set = single_set (NEXT_INSN (m_split));
          i2set = single_set (m_split);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_nonnote_nondebug_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (REG_P (new_i3_dest)
                  && REG_P (new_i2_dest)
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                INC_REG_N_SETS (REGNO (new_i2_dest), 1);
            }
        }

      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
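      /* In the default case below (when no arithmetic identity
         applies), the split works by hoisting the subexpression at
         *SPLIT into its own insn: NEWI2PAT becomes (set NEWDEST *SPLIT)
         and *SPLIT is replaced by NEWDEST within NEWPAT.  */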
      if (insn_code_number < 0
          && (split = find_split_point (&newpat, i3, false)) != 0
#ifdef HAVE_cc0
          && REG_P (i2dest)
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.
             Make sure we don't change a hard register to have a mode that
             isn't valid for it, or change the number of registers.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || can_change_dest_mode (i2dest, added_sets_2,
                                       GET_MODE (*split)))
          && (next_nonnote_nondebug_insn (i2) == i3
              || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);
          bool subst_done = false;
          newi2pat = NULL_RTX;

          i2scratch = true;

          /* *SPLIT may be part of I2SRC, so make sure we have the
             original expression around for later debug processing.
             We should not need I2SRC any more in other cases.  */
          if (MAY_HAVE_DEBUG_INSNS)
            i2src = copy_rtx (i2src);
          else
            i2src = NULL;

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
                  newdest = regno_reg_rtx[REGNO (i2dest)];
                }
            }

          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
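          /* E.g. (mult FOO (const_int 8)) becomes
             (ashift FOO (const_int 3)), since exact_log2 (8) == 3.  */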
          if (split_code == MULT
              && CONST_INT_P (XEXP (*split, 1))
              && INTVAL (XEXP (*split, 1)) > 0
              && (i = exact_log2 (UINTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_ASHIFT (split_mode,
                                             XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }

#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
            {
#ifdef LOAD_EXTEND_OP
              /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
                 what it really is.  */
              if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
                  == SIGN_EXTEND)
                SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
              else
#endif
                SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
            }
#endif

          /* Attempt to split binary operators using arithmetic identities.  */
          if (BINARY_P (SET_SRC (newpat))
              && split_mode == GET_MODE (SET_SRC (newpat))
              && ! side_effects_p (SET_SRC (newpat)))
            {
              rtx setsrc = SET_SRC (newpat);
              enum machine_mode mode = GET_MODE (setsrc);
              enum rtx_code code = GET_CODE (setsrc);
              rtx src_op0 = XEXP (setsrc, 0);
              rtx src_op1 = XEXP (setsrc, 1);

              /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
              if (rtx_equal_p (src_op0, src_op1))
                {
                  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
                  SUBST (XEXP (setsrc, 0), newdest);
                  SUBST (XEXP (setsrc, 1), newdest);
                  subst_done = true;
                }
              /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
              else if ((code == PLUS || code == MULT)
                       && GET_CODE (src_op0) == code
                       && GET_CODE (XEXP (src_op0, 0)) == code
                       && (INTEGRAL_MODE_P (mode)
                           || (FLOAT_MODE_P (mode)
                               && flag_unsafe_math_optimizations)))
                {
                  rtx p = XEXP (XEXP (src_op0, 0), 0);
                  rtx q = XEXP (XEXP (src_op0, 0), 1);
                  rtx r = XEXP (src_op0, 1);
                  rtx s = src_op1;

                  /* Split both "((X op Y) op X) op Y" and
                     "((X op Y) op Y) op X" as "T op T" where T is
                     "X op Y".  */
                  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
                       || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
                    {
                      newi2pat = gen_rtx_SET (VOIDmode, newdest,
                                              XEXP (src_op0, 0));
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                  /* Split "((X op X) op Y) op Y" as "T op T" where
                     T is "X op Y".  */
                  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
                    {
                      rtx tmp = simplify_gen_binary (code, mode, p, r);
                      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                }
            }

          if (!subst_done)
            {
              newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
              SUBST (*split, newdest);
            }

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                {
                  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                  if (reg_overlap_mentioned_p (reg, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
                }

          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the first assignment is a
     condition code register or cc0.  We eliminate this case by making sure
     the SET_DEST and SET_SRC have the same mode.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */
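
  /* Roughly, the PARALLEL handled here has the shape
        (parallel [(set D1 (sign_extend (mem ADDR)))
                   (set D2 (mem ADDR))])
     and is rewritten as the extending load alone followed by
     (set D2 (lowpart of D1)).  */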

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
               == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (REG_P (temp)
                  && VEC_index (reg_stat_type, reg_stat,
                                REGNO (temp))->nonzero_bits != 0
                  && GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (VEC_index (reg_stat_type, reg_stat,
                                 REGNO (temp))->nonzero_bits
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (REG_P (temp)
                      && VEC_index (reg_stat_type, reg_stat,
                                    REGNO (temp))->nonzero_bits != 0
                      && GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (VEC_index (reg_stat_type, reg_stat,
                                     REGNO (temp))->nonzero_bits
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
        swap_i2i3 = 1;
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1))
           && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
                 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
    {
      /* Normally, it doesn't matter which of the two is done first,
         but the one that references cc0 can't be the second, and
         one which uses any regs/memory set in between i2 and i3 can't
         be first.  */
      if (!use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                              DF_INSN_LUID (i2))
#ifdef HAVE_cc0
          && !reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
#endif
         )
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }
      else if (!use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 0)),
                                   DF_INSN_LUID (i2))
#ifdef HAVE_cc0
               && !reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1))
#endif
              )
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
        {
          undo_all ();
          return 0;
        }

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        {
          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            {
              for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
                if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                  {
                    rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                    if (reg_overlap_mentioned_p (reg, newpat))
                      {
                        undo_all ();
                        return 0;
                      }
                  }
            }

          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_pat = PATTERN (undobuf.other_insn);
      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }
    }

#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
     they are adjacent to each other or not.  */
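  /* On CC0 targets the setter must immediately precede the user, so
     give up if some other insn would end up between NEWI2PAT and I3.  */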
3933
  {
3934
    rtx p = prev_nonnote_insn (i3);
3935
    if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
3936
        && sets_cc0_p (newi2pat))
3937
      {
3938
        undo_all ();
3939
        return 0;
3940
      }
3941
  }
3942
#endif
3943
 
3944
  /* Only allow this combination if insn_rtx_costs reports that the
3945
     replacement instructions are cheaper than the originals.  */
3946
  if (!combine_validate_cost (i0, i1, i2, i3, newpat, newi2pat, other_pat))
3947
    {
3948
      undo_all ();
3949
      return 0;
3950
    }
3951
 
3952
  if (MAY_HAVE_DEBUG_INSNS)
3953
    {
3954
      struct undo *undo;
3955
 
3956
      for (undo = undobuf.undos; undo; undo = undo->next)
3957
        if (undo->kind == UNDO_MODE)
3958
          {
3959
            rtx reg = *undo->where.r;
3960
            enum machine_mode new_mode = GET_MODE (reg);
3961
            enum machine_mode old_mode = undo->old_contents.m;
3962
 
3963
            /* Temporarily revert mode back.  */
3964
            adjust_reg_mode (reg, old_mode);
3965
 
3966
            if (reg == i2dest && i2scratch)
3967
              {
3968
                /* If we used i2dest as a scratch register with a
3969
                   different mode, substitute it for the original
3970
                   i2src while its original mode is temporarily
3971
                   restored, and then clear i2scratch so that we don't
3972
                   do it again later.  */
3973
                propagate_for_debug (i2, last_combined_insn, reg, i2src);
3974
                i2scratch = false;
3975
                /* Put back the new mode.  */
3976
                adjust_reg_mode (reg, new_mode);
3977
              }
3978
            else
3979
              {
3980
                rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
3981
                rtx first, last;
3982
 
3983
                if (reg == i2dest)
3984
                  {
3985
                    first = i2;
3986
                    last = last_combined_insn;
3987
                  }
3988
                else
3989
                  {
3990
                    first = i3;
3991
                    last = undobuf.other_insn;
3992
                    gcc_assert (last);
3993
                    if (DF_INSN_LUID (last)
3994
                        < DF_INSN_LUID (last_combined_insn))
3995
                      last = last_combined_insn;
3996
                  }
3997
 
3998
                /* We're dealing with a reg that changed mode but not
3999
                   meaning, so we want to turn it into a subreg for
4000
                   the new mode.  However, because of REG sharing and
4001
                   because its mode had already changed, we have to do
4002
                   it in two steps.  First, replace any debug uses of
4003
                   reg, with its original mode temporarily restored,
4004
                   with this copy we have created; then, replace the
4005
                   copy with the SUBREG of the original shared reg,
4006
                   once again changed to the new mode.  */
4007
                propagate_for_debug (first, last, reg, tempreg);
4008
                adjust_reg_mode (reg, new_mode);
4009
                propagate_for_debug (first, last, tempreg,
4010
                                     lowpart_subreg (old_mode, reg, new_mode));
4011
              }
4012
          }
4013
    }

  /* If we will be able to accept this, we have made a
     change to the destination of I3.  This requires us to
     do a few adjustments.  */

  if (changed_i3_dest)
    {
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  if (undobuf.other_insn)
    {
      rtx note, next;

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            remove_note (undobuf.other_insn, note);
        }

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX,
                        NULL_RTX);
    }

  if (swap_i2i3)
    {
      rtx insn;
      struct insn_link *link;
      rtx ni2dest;

      /* I3 now uses what used to be its destination and which is now
         I2's destination.  This requires us to do a few adjustments.  */
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);

      /* We need a LOG_LINK from I3 to I2.  But we used to have one,
         so we still will.

         However, some later insn might be using I2's dest and have
         a LOG_LINK pointing at I3.  We must remove this link.
         The simplest way to remove the link is to point it at I1,
         which we know will be a NOTE.  */

      /* newi2pat is usually a SET here; however, recog_for_combine might
         have added some clobbers.  */
      if (GET_CODE (newi2pat) == PARALLEL)
        ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
      else
        ni2dest = SET_DEST (newi2pat);

      for (insn = NEXT_INSN (i3);
           insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
            {
              FOR_EACH_LOG_LINK (link, insn)
                if (link->insn == i3)
                  link->insn = i1;

              break;
            }
        }
    }

  {
    rtx i3notes, i2notes, i1notes = 0, i0notes = 0;
    struct insn_link *i3links, *i2links, *i1links = 0, *i0links = 0;
    rtx midnotes = 0;
    int from_luid;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src || i2dest_in_i0src
                   || !i2dest_killed
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src || i1dest_in_i0src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   || !i1dest_killed
                   ? 0 : i1dest);
    rtx elim_i0 = (i0 == 0 || i0dest_in_i0src
                   || (newi2pat && reg_set_p (i0dest, newi2pat))
                   || !i0dest_killed
                   ? 0 : i0dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
    if (i0)
      i0notes = REG_NOTES (i0), i0links = LOG_LINKS (i0);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (i0notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    i0notes = copy_rtx_if_shared (i0notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;

    if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
      {
        rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);

        reset_used_flags (call_usage);
        call_usage = copy_rtx (call_usage);

        if (substed_i2)
          {
            /* I2SRC must still be meaningful at this point.  Some splitting
               operations can invalidate I2SRC, but those operations do not
               apply to calls.  */
            gcc_assert (i2src);
            replace_rtx (call_usage, i2dest, i2src);
          }

        if (substed_i1)
          replace_rtx (call_usage, i1dest, i1src);
        if (substed_i0)
          replace_rtx (call_usage, i0dest, i0src);

        CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
      }

    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
               || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
              && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
                FOR_EACH_LOG_LINK (link, temp)
                  if (link->insn == i2)
                    link->insn = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }

    LOG_LINKS (i3) = NULL;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = NULL;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        if (MAY_HAVE_DEBUG_INSNS && i2scratch)
          propagate_for_debug (i2, last_combined_insn, i2dest, i2src);
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      {
        if (MAY_HAVE_DEBUG_INSNS && i2src)
          propagate_for_debug (i2, last_combined_insn, i2dest, i2src);
        SET_INSN_DELETED (i2);
      }

    if (i1)
      {
        LOG_LINKS (i1) = NULL;
        REG_NOTES (i1) = 0;
        if (MAY_HAVE_DEBUG_INSNS)
          propagate_for_debug (i1, last_combined_insn, i1dest, i1src);
        SET_INSN_DELETED (i1);
      }

    if (i0)
      {
        LOG_LINKS (i0) = NULL;
        REG_NOTES (i0) = 0;
        if (MAY_HAVE_DEBUG_INSNS)
          propagate_for_debug (i0, last_combined_insn, i0dest, i0src);
        SET_INSN_DELETED (i0);
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (i0)
      from_luid = DF_INSN_LUID (i0);
    else if (i1)
      from_luid = DF_INSN_LUID (i1);
    else
      from_luid = DF_INSN_LUID (i2);
    if (newi2pat)
      move_deaths (newi2pat, NULL_RTX, from_luid, i2, &midnotes);
    move_deaths (newpat, newi2pat, from_luid, i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1, elim_i0);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1, elim_i0);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1, elim_i0);
    if (i0notes)
      distribute_notes (i0notes, i0, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1, elim_i0);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1, elim_i0);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  */

    if (newi2pat && new_i2_notes)
      distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX,
                        NULL_RTX);

    if (new_i3_notes)
      distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX,
                        NULL_RTX);

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
                                            NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1, elim_i0);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
                                            NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1, elim_i0);
      }

    if (i2dest_in_i2src)
      {
        rtx new_note = alloc_reg_note (REG_DEAD, i2dest, NULL_RTX);
        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (new_note, NULL_RTX, i2, NULL_RTX, NULL_RTX,
                            NULL_RTX, NULL_RTX);
        else
          distribute_notes (new_note, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        rtx new_note = alloc_reg_note (REG_DEAD, i1dest, NULL_RTX);
        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (new_note, NULL_RTX, i2, NULL_RTX, NULL_RTX,
                            NULL_RTX, NULL_RTX);
        else
          distribute_notes (new_note, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (i0dest_in_i0src)
      {
        rtx new_note = alloc_reg_note (REG_DEAD, i0dest, NULL_RTX);
        if (newi2pat && reg_set_p (i0dest, newi2pat))
          distribute_notes (new_note, NULL_RTX, i2, NULL_RTX, NULL_RTX,
                            NULL_RTX, NULL_RTX);
        else
          distribute_notes (new_note, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);
    distribute_links (i0links);

    if (REG_P (i2dest))
      {
        struct insn_link *link;
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST. If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */
        FOR_EACH_LOG_LINK (link, i3)
          if ((set = single_set (link->insn)) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = link->insn, i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          INC_REG_N_SETS (REGNO (i2dest), -1);
      }

    if (i1 && REG_P (i1dest))
      {
        struct insn_link *link;
        rtx i1_insn = 0, i1_val = 0, set;

        FOR_EACH_LOG_LINK (link, i3)
          if ((set = single_set (link->insn)) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = link->insn, i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        if (! added_sets_1 && ! i1dest_in_i1src)
          INC_REG_N_SETS (REGNO (i1dest), -1);
      }

    if (i0 && REG_P (i0dest))
      {
        struct insn_link *link;
        rtx i0_insn = 0, i0_val = 0, set;

        FOR_EACH_LOG_LINK (link, i3)
          if ((set = single_set (link->insn)) != 0
              && rtx_equal_p (i0dest, SET_DEST (set)))
            i0_insn = link->insn, i0_val = SET_SRC (set);

        record_value_for_reg (i0dest, i0_insn, i0_val);

        if (! added_sets_0 && ! i0dest_in_i0src)
          INC_REG_N_SETS (REGNO (i0dest), -1);
      }

    /* Update reg_stat[].nonzero_bits et al for any changes that may have
       been made to this insn.  The order of the
       set_nonzero_bits_and_sign_copies() calls is important, because
       newi2pat can affect the nonzero_bits of newpat.  */
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
  }

  if (undobuf.other_insn != NULL_RTX)
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying other_insn ");
          dump_insn_slim (dump_file, undobuf.other_insn);
        }
      df_insn_rescan (undobuf.other_insn);
    }

  if (i0 && !(NOTE_P(i0) && (NOTE_KIND (i0) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i0 ");
          dump_insn_slim (dump_file, i0);
        }
      df_insn_rescan (i0);
    }

  if (i1 && !(NOTE_P(i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i1 ");
          dump_insn_slim (dump_file, i1);
        }
      df_insn_rescan (i1);
    }

  if (i2 && !(NOTE_P(i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i2 ");
          dump_insn_slim (dump_file, i2);
        }
      df_insn_rescan (i2);
    }

  if (i3 && !(NOTE_P(i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i3 ");
          dump_insn_slim (dump_file, i3);
        }
      df_insn_rescan (i3);
    }

  /* Set new_direct_jump_p if a new return or simple jump instruction
     has been created.  Adjust the CFG accordingly.  */

  if (returnjump_p (i3) || any_uncondjump_p (i3))
    {
      *new_direct_jump_p = 1;
      mark_jump_label (PATTERN (i3), i3, 0);
      update_cfg_for_uncondjump (i3);
    }

  if (undobuf.other_insn != NULL_RTX
      && (returnjump_p (undobuf.other_insn)
          || any_uncondjump_p (undobuf.other_insn)))
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (undobuf.other_insn);
    }

  /* A noop might also need cleaning up of CFG, if it comes from the
     simplification of a jump.  */
  if (JUMP_P (i3)
      && GET_CODE (newpat) == SET
      && SET_SRC (newpat) == pc_rtx
      && SET_DEST (newpat) == pc_rtx)
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (i3);
    }

  if (undobuf.other_insn != NULL_RTX
      && JUMP_P (undobuf.other_insn)
      && GET_CODE (PATTERN (undobuf.other_insn)) == SET
      && SET_SRC (PATTERN (undobuf.other_insn)) == pc_rtx
      && SET_DEST (PATTERN (undobuf.other_insn)) == pc_rtx)
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (undobuf.other_insn);
    }

  combine_successes++;
  undo_commit ();

  if (added_links_insn
      && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
      && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}

/* Undo all the modifications recorded in undobuf.  */

static void
undo_all (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      switch (undo->kind)
        {
        case UNDO_RTX:
          *undo->where.r = undo->old_contents.r;
          break;
        case UNDO_INT:
          *undo->where.i = undo->old_contents.i;
          break;
        case UNDO_MODE:
          adjust_reg_mode (*undo->where.r, undo->old_contents.m);
          break;
        case UNDO_LINKS:
          *undo->where.l = undo->old_contents.l;
          break;
        default:
          gcc_unreachable ();
        }

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  undobuf.undos = 0;
}

/* We've committed to accepting the changes we made.  Move all
   of the undos to the free list.  */

static void
undo_commit (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }
  undobuf.undos = 0;
}

/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (rtx *loc, rtx insn, bool set_src)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  int unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (MEM_P (SUBREG_REG (x)))
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn, false);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          enum machine_mode address_mode
            = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));

          SUBST (XEXP (x, 0),
                 gen_rtx_LO_SUM (address_mode,
                                 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
                                 XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif
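
      /* Illustrative (hypothetical) instance of the transformation above:
         (mem:SI (symbol_ref:SI ("x"))) becomes
             (mem:SI (lo_sum:SI (high:SI (symbol_ref:SI ("x")))
                                (symbol_ref:SI ("x"))))
         and the split point returned is the HIGH subexpression, so the
         high part of the address can be computed by a separate insn.  */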

      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                            MEM_ADDR_SPACE (x)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
                                                      XEXP (x, 0)),
                                         subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq
              && NEXT_INSN (seq) != NULL_RTX
              && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
              && NONJUMP_INSN_P (seq)
              && GET_CODE (PATTERN (seq)) == SET
              && SET_DEST (PATTERN (seq)) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (seq)))
              && NONJUMP_INSN_P (NEXT_INSN (seq))
              && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
              && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
              && memory_address_addr_space_p
                   (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
                    MEM_ADDR_SPACE (x)))
            {
              rtx src1 = SET_SRC (PATTERN (seq));
              rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
            return &XEXP (XEXP (x, 0), 0);
        }
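
      /* For illustration (a hypothetical target): if
         (plus:SI (reg:SI 1) (const_int 0x12345)) is not a valid address,
         the target's splitter might produce the two insns
             (set (reg P) (plus (reg 1) (const_int 0x12000)))
             (set (reg P) (plus (reg P) (const_int 0x345)))
         Substituting the first source into the second yields
             (plus (plus (reg 1) (const_int 0x12000)) (const_int 0x345))
         which replaces the address, with the inner PLUS as the split
         point.  */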

      /* If we have a PLUS whose first operand is complex, try computing it
         separately by making a split there.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                            MEM_ADDR_SPACE (x))
          && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
          && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
        return &XEXP (XEXP (x, 0), 0);
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && !OBJECT_P (SET_SRC (x))
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn, true);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn, false);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && HWI_COMPUTABLE_MODE_P (GET_MODE (XEXP (SET_DEST (x), 0)))
          && CONST_INT_P (XEXP (SET_DEST (x), 1))
          && CONST_INT_P (XEXP (SET_DEST (x), 2))
          && CONST_INT_P (SET_SRC (x))
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_PRECISION (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask
            = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
          rtx or_mask;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_PRECISION (mode) - len - pos;

          or_mask = gen_int_mode (src << pos, mode);
          if (src == mask)
            SUBST (SET_SRC (x),
                   simplify_gen_binary (IOR, mode, dest, or_mask));
          else
            {
              rtx negmask = gen_int_mode (~(mask << pos), mode);
              SUBST (SET_SRC (x),
                     simplify_gen_binary (IOR, mode,
                                          simplify_gen_binary (AND, mode,
                                                               dest, negmask),
                                          or_mask));
            }

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn, true);
          if (split && split != &SET_SRC (x))
            return split;
        }
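
      /* Worked (hypothetical) instance of the rewrite above, assuming
         BITS_BIG_ENDIAN is 0:
             (set (zero_extract:SI (reg:SI 60) (const_int 3) (const_int 2))
                  (const_int 5))
         has len 3, pos 2, src 5 and mask 7; since src != mask we get
             (set (reg:SI 60)
                  (ior:SI (and:SI (reg:SI 60) (const_int -29))
                          (const_int 20)))
         because ~(7 << 2) == -29 and 5 << 2 == 20.  */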

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or nonzero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (CONST_INT_P (XEXP (SET_SRC (x), 1))
              && REG_P (XEXP (SET_SRC (x), 0))
              && (pos = exact_log2 (UINTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && REG_P (SET_DEST (x))
              && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn, false);
                }
            }
          break;
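
          /* Hypothetical instance of the AND case above: with
                 (set (reg:SI 70) (and:SI (reg:SI 65) (const_int 4096)))
             where (reg 70) is used only in (ne (reg 70) (const_int 0)),
             pos is exact_log2 (4096) == 12, so the source is replaced by
             a one-bit zero_extract of (reg 65) at bit 12, avoiding the
             large constant.  */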

        case NE:
          /* If STORE_FLAG_VALUE is -1, this is (NE X 0), and only one bit
             of X can be nonzero, this can be converted into a NEG of a
             shift.  */
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_NEG (mode,
                                  gen_rtx_LSHIFTRT (mode,
                                                    XEXP (SET_SRC (x), 0),
                                                    GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn, true);
              if (split && split != &SET_SRC (x))
                return split;
            }
          break;
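
          /* Sketch of the NE case (hypothetical values): if nonzero_bits
             shows that only bit 7 of (reg:SI 65) can be set, then with
             STORE_FLAG_VALUE == -1,
                 (ne:SI (reg:SI 65) (const_int 0))
             becomes
                 (neg:SI (lshiftrt:SI (reg:SI 65) (const_int 7)))
             which yields 0 or -1 as required.  */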

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_PRECISION (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (CONST_INT_P (XEXP (SET_SRC (x), 1))
              && CONST_INT_P (XEXP (SET_SRC (x), 2)))
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_PRECISION (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0
          && pos + len <= GET_MODE_PRECISION (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_AND (mode,
                                  gen_rtx_LSHIFTRT
                                  (mode, gen_lowpart (mode, inner),
                                   GEN_INT (pos)),
                                  GEN_INT (((unsigned HOST_WIDE_INT) 1 << len)
                                           - 1)));

              split = find_split_point (&SET_SRC (x), insn, true);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_fmt_ee
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_ASHIFT (mode,
                                      gen_lowpart (mode, inner),
                                      GEN_INT (GET_MODE_PRECISION (mode)
                                               - len - pos)),
                      GEN_INT (GET_MODE_PRECISION (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn, true);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }
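
      /* Hypothetical example of the two strategies above, for a 32-bit
         mode: extracting an unsigned 8-bit field at bit 4 of (reg:SI 65)
         uses shift-and-mask,
             (and:SI (lshiftrt:SI (reg:SI 65) (const_int 4))
                     (const_int 255))
         while a signed extraction of the same field uses two shifts,
             (ashiftrt:SI (ashift:SI (reg:SI 65) (const_int 20))
                          (const_int 24))
         since 32 - 8 - 4 == 20 and 32 - 8 == 24.  */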

      /* See if this is a simple operation with a constant as the second
         operand.  It might be that this constant is out of range and hence
         could be used as a split point.  */
      if (BINARY_P (SET_SRC (x))
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
          && (OBJECT_P (XEXP (SET_SRC (x), 0))
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
        return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
         not in a register.  The operation might require this operand in a
         register, so return it as a split point.  We can always do this
         because if the first operand were another operation, we would have
         already found it as a split point.  */
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
        return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
         it is better to write this as (not (ior A B)) so we can split it.
         Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
        {
          SUBST (*loc,
                 gen_rtx_NOT (GET_MODE (x),
                              gen_rtx_fmt_ee (code == IOR ? AND : IOR,
                                              GET_MODE (x),
                                              XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 1), 0))));
          return find_split_point (loc, insn, set_src);
        }

      /* Many RISC machines have a large set of logical insns.  If the
         second operand is a NOT, put it first so we will try to split the
         other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
        {
          rtx tem = XEXP (x, 0);
          SUBST (XEXP (x, 0), XEXP (x, 1));
          SUBST (XEXP (x, 1), tem);
        }
      break;

    case PLUS:
    case MINUS:
      /* Canonicalization can produce (minus A (mult B C)), where C is a
         constant.  It may be better to try splitting (plus (mult B -C) A)
         instead if this isn't a multiply by a power of two.  */
      if (set_src && code == MINUS && GET_CODE (XEXP (x, 1)) == MULT
          && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
          && exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1))) < 0)
        {
          enum machine_mode mode = GET_MODE (x);
          unsigned HOST_WIDE_INT this_int = INTVAL (XEXP (XEXP (x, 1), 1));
          HOST_WIDE_INT other_int = trunc_int_for_mode (-this_int, mode);
          SUBST (*loc, gen_rtx_PLUS (mode, gen_rtx_MULT (mode,
                                                         XEXP (XEXP (x, 1), 0),
                                                         GEN_INT (other_int)),
                                     XEXP (x, 0)));
          return find_split_point (loc, insn, set_src);
        }
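
      /* E.g. (hypothetically), since 3 is not a power of two,
             (minus:SI (reg:SI 60) (mult:SI (reg:SI 61) (const_int 3)))
         is rewritten as
             (plus:SI (mult:SI (reg:SI 61) (const_int -3)) (reg:SI 60))
         so that a multiply-accumulate pattern may match.  */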

      /* Split at a multiply-accumulate instruction.  However if this is
         the SET_SRC, we likely do not have such an instruction and it's
         worthless to try this split.  */
      if (!set_src && GET_CODE (XEXP (x, 0)) == MULT)
        return loc;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_BITFIELD_OPS:              /* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case RTX_TERNARY:
      split = find_split_point (&XEXP (x, 2), insn, false);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      split = find_split_point (&XEXP (x, 1), insn, false);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_UNARY:
      /* Some machines have (and (shift ...) ...) insns.  If X is not
         an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
        return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn, false);
      if (split)
        return split;
      return loc;

    default:
      /* Otherwise, we don't have a split point.  */
      return 0;
    }
}

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is nonzero if we are processing the SET_DEST of a SET.

   IN_COND is nonzero if we are at the top level of a condition.

   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is nonzero.  */

static rtx
subst (rtx x, rtx from, rtx to, int in_dest, int in_cond, int unique_copy)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  const char *fmt;
  int len, i;
  rtx new_rtx;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)                        \
  ((X) == (Y)                                           \
   || (REG_P (X) && REG_P (Y)   \
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they
     will not have been seen as equal above.  However, the log links code
     will make a LOG_LINKS entry for that case.  If we do nothing, we
     will try to rerecognize our original insn and, when it succeeds,
     we will delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && REG_P (from)
      && reg_overlap_mentioned_p (x, from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && OBJECT_P (x))
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;
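
  /* To illustrate the hazard described above (hypothetical values):
     if FROM is (reg:SI 65) and TO is (plus:SI (reg:SI 65) (const_int 1)),
     then once TO has been substituted into X, scanning TO itself would
     replace its inner (reg:SI 65) with another copy of TO, yielding a
     self-referential expression; hence we return TO immediately.  */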
5087
 
5088
  /* Parallel asm_operands need special attention because all of the
5089
     inputs are shared across the arms.  Furthermore, unsharing the
5090
     rtl results in recognition failures.  Failure to handle this case
5091
     specially can result in circular rtl.
5092
 
5093
     Solve this by doing a normal pass across the first entry of the
5094
     parallel, and only processing the SET_DESTs of the subsequent
5095
     entries.  Ug.  */
5096
 
5097
  if (code == PARALLEL
5098
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
5099
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
5100
    {
5101
      new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, 0, unique_copy);
5102
 
5103
      /* If this substitution failed, this whole thing fails.  */
5104
      if (GET_CODE (new_rtx) == CLOBBER
5105
          && XEXP (new_rtx, 0) == const0_rtx)
5106
        return new_rtx;
5107
 
5108
      SUBST (XVECEXP (x, 0, 0), new_rtx);
5109
 
5110
      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
5111
        {
5112
          rtx dest = SET_DEST (XVECEXP (x, 0, i));
5113
 
5114
          if (!REG_P (dest)
5115
              && GET_CODE (dest) != CC0
5116
              && GET_CODE (dest) != PC)
5117
            {
5118
              new_rtx = subst (dest, from, to, 0, 0, unique_copy);
5119
 
5120
              /* If this substitution failed, this whole thing fails.  */
5121
              if (GET_CODE (new_rtx) == CLOBBER
5122
                  && XEXP (new_rtx, 0) == const0_rtx)
5123
                return new_rtx;
5124
 
5125
              SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
5126
            }
5127
        }
5128
    }
5129
  else
5130
    {
5131
      len = GET_RTX_LENGTH (code);
5132
      fmt = GET_RTX_FORMAT (code);
5133
 
5134
      /* We don't need to process a SET_DEST that is a register, CC0,
5135
         or PC, so set up to skip this common case.  All other cases
5136
         where we want to suppress replacing something inside a
5137
         SET_SRC are handled via the IN_DEST operand.  */
5138
      if (code == SET
5139
          && (REG_P (SET_DEST (x))
5140
              || GET_CODE (SET_DEST (x)) == CC0
5141
              || GET_CODE (SET_DEST (x)) == PC))
5142
        fmt = "ie";
5143
 
5144
      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
5145
         constant.  */
5146
      if (fmt[0] == 'e')
5147
        op0_mode = GET_MODE (XEXP (x, 0));
5148
 
5149
      for (i = 0; i < len; i++)
5150
        {
5151
          if (fmt[i] == 'E')
5152
            {
5153
              int j;
5154
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5155
                {
5156
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
5157
                    {
5158
                      new_rtx = (unique_copy && n_occurrences
5159
                             ? copy_rtx (to) : to);
5160
                      n_occurrences++;
5161
                    }
5162
                  else
5163
                    {
5164
                      new_rtx = subst (XVECEXP (x, i, j), from, to, 0, 0,
5165
                                       unique_copy);
5166
 
5167
                      /* If this substitution failed, this whole thing
5168
                         fails.  */
5169
                      if (GET_CODE (new_rtx) == CLOBBER
5170
                          && XEXP (new_rtx, 0) == const0_rtx)
5171
                        return new_rtx;
5172
                    }
5173
 
5174
                  SUBST (XVECEXP (x, i, j), new_rtx);
5175
                }
5176
            }
5177
          else if (fmt[i] == 'e')
5178
            {
5179
              /* If this is a register being set, ignore it.  */
5180
              new_rtx = XEXP (x, i);
5181
              if (in_dest
5182
                  && i == 0
5183
                  && (((code == SUBREG || code == ZERO_EXTRACT)
5184
                       && REG_P (new_rtx))
5185
                      || code == STRICT_LOW_PART))
5186
                ;
5187
 
5188
              else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
5189
                {
5190
                  /* In general, don't install a subreg involving two
5191
                     modes not tieable.  It can worsen register
5192
                     allocation, and can even make invalid reload
5193
                     insns, since the reg inside may need to be copied
5194
                     from in the outside mode, and that may be invalid
5195
                     if it is an fp reg copied in integer mode.
5196
 
5197
                     We allow two exceptions to this: It is valid if
5198
                     it is inside another SUBREG and the mode of that
5199
                     SUBREG and the mode of the inside of TO is
5200
                     tieable and it is valid if X is a SET that copies
5201
                     FROM to CC0.  */
5202
 
5203
                  if (GET_CODE (to) == SUBREG
5204
                      && ! MODES_TIEABLE_P (GET_MODE (to),
5205
                                            GET_MODE (SUBREG_REG (to)))
5206
                      && ! (code == SUBREG
5207
                            && MODES_TIEABLE_P (GET_MODE (x),
5208
                                                GET_MODE (SUBREG_REG (to))))
5209
#ifdef HAVE_cc0
5210
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
5211
#endif
5212
                      )
5213
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5214
 
5215
#ifdef CANNOT_CHANGE_MODE_CLASS
5216
                  if (code == SUBREG
5217
                      && REG_P (to)
5218
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
5219
                      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
5220
                                                   GET_MODE (to),
5221
                                                   GET_MODE (x)))
5222
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
5223
#endif
5224
 
5225
                  new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
5226
                  n_occurrences++;
5227
                }
5228
              else
5229
                /* If we are in a SET_DEST, suppress most cases unless we
5230
                   have gone inside a MEM, in which case we want to
5231
                   simplify the address.  We assume here that things that
5232
                   are actually part of the destination have their inner
5233
                   parts in the first expression.  This is true for SUBREG,
5234
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
5235
                   things aside from REG and MEM that should appear in a
5236
                   SET_DEST.  */
5237
                new_rtx = subst (XEXP (x, i), from, to,
5238
                             (((in_dest
5239
                                && (code == SUBREG || code == STRICT_LOW_PART
5240
                                    || code == ZERO_EXTRACT))
5241
                               || code == SET)
5242
                              && i == 0),
5243
                                 code == IF_THEN_ELSE && i == 0,
5244
                                 unique_copy);
5245
 
5246
              /* If we found that we will have to reject this combination,
5247
                 indicate that by returning the CLOBBER ourselves, rather than
5248
                 an expression containing it.  This will speed things up as
5249
                 well as prevent accidents where two CLOBBERs are considered
5250
                 to be equal, thus producing an incorrect simplification.  */
5251
 
5252
              if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
5253
                return new_rtx;
5254
 
5255
              if (GET_CODE (x) == SUBREG
5256
                  && (CONST_INT_P (new_rtx)
5257
                      || GET_CODE (new_rtx) == CONST_DOUBLE))
5258
                {
5259
                  enum machine_mode mode = GET_MODE (x);
5260
 
5261
                  x = simplify_subreg (GET_MODE (x), new_rtx,
5262
                                       GET_MODE (SUBREG_REG (x)),
5263
                                       SUBREG_BYTE (x));
5264
                  if (! x)
5265
                    x = gen_rtx_CLOBBER (mode, const0_rtx);
5266
                }
5267
              else if (CONST_INT_P (new_rtx)
5268
                       && GET_CODE (x) == ZERO_EXTEND)
5269
                {
5270
                  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
5271
                                                new_rtx, GET_MODE (XEXP (x, 0)));
5272
                  gcc_assert (x);
5273
                }
5274
              else
5275
                SUBST (XEXP (x, i), new_rtx);
5276
            }
5277
        }
5278
    }
5279
 
5280
  /* Check if we are loading something from the constant pool via float
5281
     extension; in this case we would undo compress_float_constant
5282
     optimization and degenerate constant load to an immediate value.  */
5283
  if (GET_CODE (x) == FLOAT_EXTEND
5284
      && MEM_P (XEXP (x, 0))
5285
      && MEM_READONLY_P (XEXP (x, 0)))
5286
    {
5287
      rtx tmp = avoid_constant_pool_reference (x);
5288
      if (x != tmp)
5289
        return x;
5290
    }
5291
 
5292
  /* Try to simplify X.  If the simplification changed the code, it is likely
5293
     that further simplification will help, so loop, but limit the number
5294
     of repetitions that will be performed.  */
5295
 
5296
  for (i = 0; i < 4; i++)
5297
    {
5298
      /* If X is sufficiently simple, don't bother trying to do anything
5299
         with it.  */
5300
      if (code != CONST_INT && code != REG && code != CLOBBER)
5301
        x = combine_simplify_rtx (x, op0_mode, in_dest, in_cond);
5302
 
5303
      if (GET_CODE (x) == code)
5304
        break;
5305
 
5306
      code = GET_CODE (x);
5307
 
5308
      /* We no longer know the original mode of operand 0 since we
5309
         have changed the form of X)  */
5310
      op0_mode = VOIDmode;
5311
    }
5312
 
5313
  return x;
5314
}
5315
 
/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
   if we are inside a SET_DEST.  IN_COND is nonzero if we are at the top level
   of a condition.  */

static rtx
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest,
                      int in_cond)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if ((BINARY_P (x)
       && ((!OBJECT_P (XEXP (x, 0))
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
           || (!OBJECT_P (XEXP (x, 1))
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
      || (UNARY_P (x)
          && (!OBJECT_P (XEXP (x, 0))
               && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
          /* If everything is a comparison, what we have is highly unlikely
             to be simpler, so don't use it.  */
          && ! (COMPARISON_P (x)
                && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
        {
          rtx cop1 = const0_rtx;
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

          if (cond_code == NE && COMPARISON_P (cond))
            return x;

          /* Simplify the alternative arms; this may collapse the true and
             false arms to store-flag values.  Be careful to use copy_rtx
             here since true_rtx or false_rtx might share RTL with x as a
             result of the if_then_else_cond call above.  */
          true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0, 0);
          false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0, 0);

          /* If true_rtx and false_rtx are not general_operands, an if_then_else
             is unlikely to be simpler.  */
          if (general_operand (true_rtx, VOIDmode)
              && general_operand (false_rtx, VOIDmode))
            {
              enum rtx_code reversed;

              /* Restarting if we generate a store-flag expression will cause
                 us to loop.  Just drop through in this case.  */

              /* If the result values are STORE_FLAG_VALUE and zero, we can
                 just make the comparison operation.  */
              if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
                x = simplify_gen_relational (cond_code, mode, VOIDmode,
                                             cond, cop1);
              else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_relational (reversed, mode, VOIDmode,
                                             cond, cop1);

              /* Likewise, we can make the negate of a comparison operation
                 if the result values are - STORE_FLAG_VALUE and zero.  */
              else if (CONST_INT_P (true_rtx)
                       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
                       && false_rtx == const0_rtx)
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (cond_code,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else if (CONST_INT_P (false_rtx)
                       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
                       && true_rtx == const0_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (reversed,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else
                return gen_rtx_IF_THEN_ELSE (mode,
                                             simplify_gen_relational (cond_code,
                                                                      mode,
                                                                      VOIDmode,
                                                                      cond,
                                                                      cop1),
                                             true_rtx, false_rtx);

              code = GET_CODE (x);
              op0_mode = VOIDmode;
            }
        }
    }

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
        if (cmp_mode == VOIDmode)
          {
            cmp_mode = GET_MODE (XEXP (x, 1));
            if (cmp_mode == VOIDmode)
              cmp_mode = op0_mode;
          }
        temp = simplify_relational_operation (code, mode, cmp_mode,
                                              XEXP (x, 0), XEXP (x, 1));
      }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case RTX_BITFIELD_OPS:
    case RTX_TERNARY:
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
                                         XEXP (x, 1), XEXP (x, 2));
      break;
    default:
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
          || (flag_associative_math && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
        {
          rtx other = XEXP (XEXP (x, 0), 0);
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
          rtx inner_op1 = XEXP (x, 1);
          rtx inner;

          /* Make sure we pass the constant operand if any as the second
             one if this is a commutative operation.  */
          if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
            {
              rtx tem = inner_op0;
              inner_op0 = inner_op1;
              inner_op1 = tem;
            }
          inner = simplify_binary_operation (code == MINUS ? PLUS
                                             : code == DIV ? MULT
                                             : code,
                                             mode, inner_op0, inner_op1);

          /* For commutative operations, try the other pair if that one
             didn't simplify.  */
          if (inner == 0 && COMMUTATIVE_ARITH_P (x))
            {
              other = XEXP (XEXP (x, 0), 1);
              inner = simplify_binary_operation (code, mode,
                                                 XEXP (XEXP (x, 0), 0),
                                                 XEXP (x, 1));
            }

          if (inner)
            return simplify_gen_binary (code, mode, other, inner);
        }
    }

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
         address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (SUBREG_REG (x));

      /* See if this can be moved to simplify_subreg.  */
      if (CONSTANT_P (SUBREG_REG (x))
          && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
             /* Don't call gen_lowpart if the inner mode
                is VOIDmode and we cannot simplify it, as SUBREG without
                inner mode is invalid.  */
          && (GET_MODE (SUBREG_REG (x)) != VOIDmode
              || gen_lowpart_common (mode, SUBREG_REG (x))))
        return gen_lowpart (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;
      {
        rtx temp;
        temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
                                SUBREG_BYTE (x));
        if (temp)
          return temp;
      }

      /* Don't change the mode of the MEM if that would change the meaning
         of the address.  */
      if (MEM_P (SUBREG_REG (x))
          && (MEM_VOLATILE_P (SUBREG_REG (x))
              || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
        return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
         we might have been counting on using the fact that some bits were
         zero.  We now do this in the SET.  */

      break;

    case NEG:
      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
         replaced by (lshiftrt X C).  This will convert
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
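      /* For instance, on a target where MODE is 32 bits wide and C is 31,
         (ashiftrt X 31) is 0 or -1 according to the sign bit of X, so its
         negation is 0 or 1 -- exactly what (lshiftrt X 31) computes by
         moving the sign bit down to bit 0.  */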

      if (GET_CODE (temp) == ASHIFTRT
          && CONST_INT_P (XEXP (temp, 1))
          && INTVAL (XEXP (temp, 1)) == GET_MODE_PRECISION (mode) - 1)
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
                                     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
         or a SUBREG of one since we'd be making the expression more
         complex if it was just a register.  */

      if (!REG_P (temp)
          && ! (GET_CODE (temp) == SUBREG
                && REG_P (SUBREG_REG (temp)))
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
        {
          rtx temp1 = simplify_shift_const
            (NULL_RTX, ASHIFTRT, mode,
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
                                   GET_MODE_PRECISION (mode) - 1 - i),
             GET_MODE_PRECISION (mode) - 1 - i);

          /* If all we did was surround TEMP with the two shifts, we
             haven't improved anything, so don't use it.  Otherwise,
             we are better off with TEMP1.  */
          if (GET_CODE (temp1) != ASHIFTRT
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
              || XEXP (XEXP (temp1, 0), 0) != temp)
            return temp1;
        }
      break;

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
        break;

      if (HWI_COMPUTABLE_MODE_P (mode))
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                              GET_MODE_MASK (mode), 0));

      /* We can truncate a constant value and return it.  */
      if (CONST_INT_P (XEXP (x, 0)))
        return gen_int_mode (INTVAL (XEXP (x, 0)), mode);

      /* Similarly to what we do in simplify-rtx.c, a truncate of a register
         whose value is a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  */
      if (HWI_COMPUTABLE_MODE_P (mode)
          && (STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
          && (temp = get_last_value (XEXP (x, 0)))
          && COMPARISON_P (temp))
        return gen_lowpart (mode, XEXP (x, 0));
      break;

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
         returning the inner CONST since CONST can be shared with a
         REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
         can add in an offset.  find_split_point will split this address up
         again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
        return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
         bit-field and can be replaced by either a sign_extend or a
         sign_extract.  The `and' may be a zero_extend and the two
         <c>, -<c> constants may be reversed.  */
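      /* Concretely, with pow2 == 8 and c == 4,
         (plus (xor (and X 7) 4) -4) maps the field values 0 ... 3 to
         themselves and 4 ... 7 to -4 ... -1, which is precisely a sign
         extension of the low three bits of X.  */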
      if (GET_CODE (XEXP (x, 0)) == XOR
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
          && ((i = exact_log2 (UINTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
              || (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
          && HWI_COMPUTABLE_MODE_P (mode)
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
               && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
               && (UINTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
                   == ((unsigned HOST_WIDE_INT) 1 << (i + 1)) - 1))
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
                  && (GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
                      == (unsigned int) i + 1))))
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
                                 GET_MODE_PRECISION (mode) - (i + 1)),
           GET_MODE_PRECISION (mode) - (i + 1));

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
         the bitsize of the mode - 1.  This allows simplification of
         "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
          && !REG_P (XEXP (x, 0))
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                && REG_P (SUBREG_REG (XEXP (x, 0))))
          && nonzero_bits (XEXP (x, 0), mode) == 1)
        return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
                                 GET_MODE_PRECISION (mode) - 1),
           GET_MODE_PRECISION (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
         the addition into an IOR.  This will often be further simplified,
         for example in cases like ((a & 1) + (a & 2)), which can
         become a & 3.  */
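      /* This is safe because when no bit position is set in both addends,
         the addition cannot generate a carry, so each result bit is simply
         the inclusive OR of the corresponding addend bits.  */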

      if (HWI_COMPUTABLE_MODE_P (mode)
          && (nonzero_bits (XEXP (x, 0), mode)
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
        {
          /* Try to simplify the expression further.  */
          rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
          temp = combine_simplify_rtx (tor, VOIDmode, in_dest, 0);

          /* If we could, great.  If not, do not go ahead with the IOR
             replacement, since PLUS appears in many special purpose
             address arithmetic instructions.  */
          if (GET_CODE (temp) != CLOBBER
              && (GET_CODE (temp) != IOR
                  || ((XEXP (temp, 0) != XEXP (x, 0)
                       || XEXP (temp, 1) != XEXP (x, 1))
                      && (XEXP (temp, 0) != XEXP (x, 1)
                          || XEXP (temp, 1) != XEXP (x, 0)))))
            return temp;
        }
      break;

    case MINUS:
      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
         (and <foo> (const_int pow2-1))  */
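      /* For instance, (minus X (and X -8)) becomes (and X 7): the AND
         clears the low three bits of X, so the subtraction leaves exactly
         those three bits, i.e. X modulo 8.  */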
      if (GET_CODE (XEXP (x, 1)) == AND
          && CONST_INT_P (XEXP (XEXP (x, 1), 1))
          && exact_log2 (-UINTVAL (XEXP (XEXP (x, 1), 1))) >= 0
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  This
         occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      /* Try to simplify a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_associative_math
          && GET_CODE (XEXP (x, 0)) == DIV)
        {
          rtx tem = simplify_binary_operation (MULT, mode,
                                               XEXP (XEXP (x, 0), 0),
                                               XEXP (x, 1));
          if (tem)
            return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
        }
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
         its first operand is a shift.  */
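      /* For instance, (udiv (lshiftrt X 2) 4) becomes
         (lshiftrt (lshiftrt X 2) 2), which simplify_shift_const can then
         merge into (lshiftrt X 4).  */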
      if (CONST_INT_P (XEXP (x, 1))
          && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
              || GET_CODE (XEXP (x, 0)) == ROTATE
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
      break;

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
          || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
              && ! CC0_P (XEXP (x, 0))))
        {
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          enum rtx_code new_code;

          if (GET_CODE (op0) == COMPARE)
            op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

          /* Simplify our comparison, if possible.  */
          new_code = simplify_comparison (code, &op0, &op1);

          /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
             if only the low-order bit is possibly nonzero in X (such as when
             X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
             (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
             known to be either 0 or -1, NE becomes a NEG and EQ becomes
             (plus X 1).

             Remove any ZERO_EXTRACT we made when thinking this was a
             comparison.  It may now be simpler to use, e.g., an AND.  If a
             ZERO_EXTRACT is indeed appropriate, it will be placed back by
             the call to make_compound_operation in the SET case.

             Don't apply these optimizations if the caller would
             prefer a comparison rather than a value.
             E.g., for the condition in an IF_THEN_ELSE most targets need
             an explicit comparison.  */

          if (in_cond)
            ;

          else if (STORE_FLAG_VALUE == 1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && nonzero_bits (op0, mode) == 1)
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == 1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_PRECISION (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_binary (XOR, mode,
                                          gen_lowpart (mode, op0),
                                          const1_rtx);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_PRECISION (mode)))
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), 1);
            }

          /* If STORE_FLAG_VALUE is -1, we have cases similar to
             those above.  */
          if (in_cond)
            ;

          else if (STORE_FLAG_VALUE == -1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && (num_sign_bit_copies (op0, mode)
                  == GET_MODE_PRECISION (mode)))
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == -1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_PRECISION (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NOT, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          /* If X is 0/1, (eq X 0) is X-1.  */
          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), -1);
            }

          /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
             one bit that might be nonzero, we can convert (ne x 0) to
             (ashift x c) where C puts the bit in the sign bit.  Remove any
             AND with STORE_FLAG_VALUE when we are done, since we are only
             going to test the sign bit.  */
          if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && HWI_COMPUTABLE_MODE_P (mode)
              && val_signbit_p (mode, STORE_FLAG_VALUE)
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
            {
              x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                        expand_compound_operation (op0),
                                        GET_MODE_PRECISION (mode) - 1 - i);
              if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
                return XEXP (x, 0);
              else
                return x;
            }

          /* If the code changed, return a whole new comparison.  */
          if (new_code != code)
            return gen_rtx_fmt_ee (new_code, mode, op0, op1);

          /* Otherwise, keep this operation, but maybe change its operands.
             This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
          SUBST (XEXP (x, 0), op0);
          SUBST (XEXP (x, 1), op1);
        }
      break;

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
        return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
      return simplify_logical (x);

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (CONST_INT_P (XEXP (x, 1)))
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)));

      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
        SUBST (XEXP (x, 1),
               force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
                              ((unsigned HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
                              0));
      break;

    default:
      break;
    }

  return x;
}

/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = COMPARISON_P (cond);
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;

  /* Simplify storing of the truth value.  */
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return simplify_gen_relational (true_code, mode, VOIDmode,
                                    XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode)))
    return reversed;

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = reversed_comparison_code (cond, NULL))
          != UNKNOWN)
      && REG_P (XEXP (cond, 0)))
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
        {
          swapped = 1, true_code = EQ, false_code = NE;
          temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
        }

      /* If we are comparing against zero and the expression being tested has
         only a single bit that might be nonzero, that is its value when it is
         not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
        {
          false_code = EQ;
          false_val = gen_int_mode (nzb, GET_MODE (from));
        }
      else if (true_code == EQ && true_val == const0_rtx
               && (num_sign_bit_copies (from, GET_MODE (from))
                   == GET_MODE_PRECISION (GET_MODE (from))))
        {
          false_code = EQ;
          false_val = constm1_rtx;
        }

      /* Now simplify an arm if we know the value of the register in the
         branch and it is used in the arm.  Be careful due to the potential
         of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
        true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
                                      from, true_val),
                          pc_rtx, pc_rtx, 0, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
        false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
                                   from, false_val),
                           pc_rtx, pc_rtx, 0, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && reversed_comparison_code (cond, NULL) != UNKNOWN
      && (true_rtx == pc_rtx
          || (CONSTANT_P (true_rtx)
              && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
          || true_rtx == const0_rtx
          || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
          || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
              && !OBJECT_P (false_rtx))
          || reg_mentioned_p (true_rtx, false_rtx)
          || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = COMPARISON_P (cond);
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && !HONOR_NANS (mode)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
           && !HONOR_NANS (mode)
           && rtx_equal_p (XEXP (cond, 0), true_rtx)
           && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */
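  /* E.g., (if_then_else (ge X 0) X (neg X)) becomes (abs X) via the
     GT/GE arm below, and (if_then_else (lt X 0) X (neg X)) becomes
     (neg (abs X)).  */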

  if (GET_MODE_CLASS (mode) == MODE_INT
      && comparison_p
      && XEXP (cond, 1) == const0_rtx
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
        return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
        return
          simplify_gen_unary (NEG, mode,
                              simplify_gen_unary (ABS, mode, true_rtx, mode),
                              mode);
      default:
        break;
      }

  /* Look for MIN or MAX.  */
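  /* E.g., (if_then_else (gt A B) A B) becomes (smax A B) and
     (if_then_else (ltu A B) A B) becomes (umin A B).  For floating-point
     modes this is only done under -funsafe-math-optimizations, since the
     MIN/MAX codes need not match the comparison's treatment of NaNs.  */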

  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
        return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
      case LE:
      case LT:
        return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
      case GEU:
      case GTU:
        return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
      case LEU:
      case LTU:
        return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
      default:
        break;
      }

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */
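  /* For instance, with STORE_FLAG_VALUE == 1,
     (if_then_else (ne A 0) (plus Z 3) Z) can become
     (plus Z (mult (ne A 0) 3)): the store-flag expression is 0 when the
     condition is false, making the PLUS an identity, and 1 when it is
     true, supplying the constant 3.  */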

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p
      && GET_MODE_CLASS (mode) == MODE_INT
      && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_PRECISION (mode)
                      - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_PRECISION (mode)
                      - GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (t, 0), 1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && HWI_COMPUTABLE_MODE_P (mode)
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && HWI_COMPUTABLE_MODE_P (mode)
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

      if (z)
        {
          temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
                                                 cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0, 0);
          temp = simplify_gen_binary (MULT, m, temp,
                                      simplify_gen_binary (MULT, m, c1,
                                                           const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0, 0);
          temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

          if (extend_op != UNKNOWN)
            temp = simplify_gen_unary (extend_op, mode, temp, m);

          return temp;
        }
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */
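  /* E.g., if A is known to be 0 or 1, (if_then_else (ne A 0) 8 0)
     becomes (ashift A 3), which places A's single bit at bit position 3
     and so yields 8 when A is 1 and 0 otherwise.  */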

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
           && (i = exact_log2 (UINTVAL (true_rtx))) >= 0)
          || ((num_sign_bit_copies (XEXP (cond, 0), mode)
               == GET_MODE_PRECISION (mode))
              && (i = exact_log2 (-UINTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
                            gen_lowpart (mode, XEXP (cond, 0)), i);

  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8.  */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
      && GET_MODE (XEXP (cond, 0)) == mode
      && (UINTVAL (true_rtx) & GET_MODE_MASK (mode))
          == nonzero_bits (XEXP (cond, 0), mode)
      && (i = exact_log2 (UINTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
    return XEXP (cond, 0);

  return x;
}

/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (rtx x)
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && ANY_RETURN_P (src))
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT && HWI_COMPUTABLE_MODE_P (mode))
    {
      src = force_to_mode (src, mode, ~(unsigned HOST_WIDE_INT) 0, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_MODE_CLASS (mode) == MODE_CC
       || GET_CODE (src) == COMPARE
       || CC0_P (dest))
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && COMPARISON_P (*cc_use)
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1, tmp;
      int other_changed = 0;
      rtx inner_compare = NULL_RTX;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
        {
          op0 = XEXP (src, 0), op1 = XEXP (src, 1);
          if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
            {
              inner_compare = op0;
              op0 = XEXP (inner_compare, 0), op1 = XEXP (inner_compare, 1);
            }
        }
      else
        op0 = src, op1 = CONST0_RTX (GET_MODE (src));

      tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
                                           op0, op1);
      if (!tmp)
        new_code = old_code;
      else if (!CONSTANT_P (tmp))
        {
          new_code = GET_CODE (tmp);
          op0 = XEXP (tmp, 0);
          op1 = XEXP (tmp, 1);
        }
      else
        {
          rtx pat = PATTERN (other_insn);
          undobuf.other_insn = other_insn;
          SUBST (*cc_use, tmp);

          /* Attempt to simplify CC user.  */
          if (GET_CODE (pat) == SET)
            {
              rtx new_rtx = simplify_rtx (SET_SRC (pat));
              if (new_rtx != NULL_RTX)
                SUBST (SET_SRC (pat), new_rtx);
            }

          /* Convert X into a no-op move.  */
          SUBST (SET_DEST (x), pc_rtx);
          SUBST (SET_SRC (x), pc_rtx);
          return x;
        }

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (new_code, &op0, &op1);

#ifdef SELECT_CC_MODE
      /* If this machine has CC modes other than CCmode, check to see if we
         need to use a different CC mode here.  */
      if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
        compare_mode = GET_MODE (op0);
      else if (inner_compare
               && GET_MODE_CLASS (GET_MODE (inner_compare)) == MODE_CC
               && new_code == old_code
               && op0 == XEXP (inner_compare, 0)
               && op1 == XEXP (inner_compare, 1))
        compare_mode = GET_MODE (inner_compare);
      else
        compare_mode = SELECT_CC_MODE (new_code, op0, op1);

#ifndef HAVE_cc0
      /* If the mode changed, we have to change SET_DEST, the mode in the
         compare, and the mode in the place SET_DEST is used.  If SET_DEST is
         a hard register, just build new versions with the proper mode.  If it
         is a pseudo, we lose unless it is the only time we set the pseudo, in
         which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
        {
          if (can_change_dest_mode (dest, 0, compare_mode))
            {
              unsigned int regno = REGNO (dest);
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (x), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              other_changed = 1;

              dest = new_dest;
            }
        }
#endif  /* cc0 */
#endif  /* SELECT_CC_MODE */

      /* If the code changed, we have to build a new comparison in
         undobuf.other_insn.  */
      if (new_code != old_code)
        {
          int other_changed_previously = other_changed;
          unsigned HOST_WIDE_INT mask;
          rtx old_cc_use = *cc_use;

          SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
                                          dest, const0_rtx));
          other_changed = 1;

          /* If the only change we made was to change an EQ into an NE or
             vice versa, OP0 has only one bit that might be nonzero, and OP1
             is zero, check if changing the user of the condition code will
             produce a valid insn.  If it won't, we can keep the original code
             in that insn by surrounding our operation with an XOR.  */

          if (((old_code == NE && new_code == EQ)
               || (old_code == EQ && new_code == NE))
              && ! other_changed_previously && op1 == const0_rtx
              && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
              && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
            {
              rtx pat = PATTERN (other_insn), note = 0;

              if ((recog_for_combine (&pat, other_insn, &note) < 0
                   && ! check_asm_operands (pat)))
                {
                  *cc_use = old_cc_use;
                  other_changed = 0;

                  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
                                             op0, GEN_INT (mask));
                }
            }
        }

      if (other_changed)
        undobuf.other_insn = other_insn;

      /* Otherwise, if we didn't previously have a COMPARE in the
         correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
      else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = SET_SRC (x);
        }
      /* Otherwise, update the COMPARE if needed.  */
      else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
         compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && !OBJECT_P (SUBREG_REG (src))
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
           / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
               + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
      && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
            && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
                                         GET_MODE (SUBREG_REG (src)),
                                         GET_MODE (src)))
#endif
      && (REG_P (dest)
          || (GET_CODE (dest) == SUBREG
              && REG_P (SUBREG_REG (dest)))))
    {
      SUBST (SET_DEST (x),
             gen_lowpart (GET_MODE (SUBREG_REG (src)),
                                      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef HAVE_cc0
  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
     in SRC.  */
  if (dest == cc0_rtx
      && GET_CODE (src) == SUBREG
      && subreg_lowpart_p (src)
      && (GET_MODE_PRECISION (GET_MODE (src))
          < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (src)))))
    {
      rtx inner = SUBREG_REG (src);
      enum machine_mode inner_mode = GET_MODE (inner);

      /* Here we make sure that we don't have a sign bit on.  */
      if (val_signbit_known_clear_p (GET_MODE (src),
                                     nonzero_bits (inner, inner_mode)))
        {
          SUBST (SET_SRC (x), inner);
          src = SET_SRC (x);
        }
    }
#endif

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */
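  /* For instance, on a target whose LOAD_EXTEND_OP for QImode is
     ZERO_EXTEND, (set FOO (subreg:SI (mem:QI BAR) 0)) becomes
     (set FOO (zero_extend:SI (mem:QI BAR))).  */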

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (src)))
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
      && SUBREG_BYTE (src) == 0
      && paradoxical_subreg_p (src)
      && MEM_P (SUBREG_REG (src)))
    {
      SUBST (SET_SRC (x),
             gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
                            GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead. Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */
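  /* E.g., with M known to be 0 or -1,
     (set D (if_then_else (ne M 0) A B)) can be rewritten as
     (set D (ior (and M A) (and (not M) B))): M acts as a mask that
     selects A when it is all ones and B when it is zero.  */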
6630
 
6631
  if (GET_CODE (dest) != PC
6632
      && GET_CODE (src) == IF_THEN_ELSE
6633
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
6634
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
6635
      && XEXP (XEXP (src, 0), 1) == const0_rtx
6636
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
6637
#ifdef HAVE_conditional_move
6638
      && ! can_conditionally_move_p (GET_MODE (src))
6639
#endif
6640
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
6641
                               GET_MODE (XEXP (XEXP (src, 0), 0)))
6642
          == GET_MODE_PRECISION (GET_MODE (XEXP (XEXP (src, 0), 0))))
6643
      && ! side_effects_p (src))
6644
    {
6645
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
6646
                      ? XEXP (src, 1) : XEXP (src, 2));
6647
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
6648
                   ? XEXP (src, 2) : XEXP (src, 1));
6649
      rtx term1 = const0_rtx, term2, term3;
6650
 
6651
      if (GET_CODE (true_rtx) == IOR
6652
          && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
6653
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
6654
      else if (GET_CODE (true_rtx) == IOR
6655
               && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
6656
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
6657
      else if (GET_CODE (false_rtx) == IOR
6658
               && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
6659
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
6660
      else if (GET_CODE (false_rtx) == IOR
6661
               && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
6662
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
6663
 
6664
      term2 = simplify_gen_binary (AND, GET_MODE (src),
6665
                                   XEXP (XEXP (src, 0), 0), true_rtx);
6666
      term3 = simplify_gen_binary (AND, GET_MODE (src),
6667
                                   simplify_gen_unary (NOT, GET_MODE (src),
6668
                                                       XEXP (XEXP (src, 0), 0),
6669
                                                       GET_MODE (src)),
6670
                                   false_rtx);
6671
 
6672
      SUBST (SET_SRC (x),
6673
             simplify_gen_binary (IOR, GET_MODE (src),
6674
                                  simplify_gen_binary (IOR, GET_MODE (src),
6675
                                                       term1, term2),
6676
                                  term3));
6677
 
6678
      src = SET_SRC (x);
6679
    }
6680
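
  /* For example (an illustration, not from the original sources): if C is
     known to be 0 or -1, (if_then_else (ne C (const_int 0)) (ior F B) F)
     is rewritten above as (ior F (and C B)): when C == -1 this is
     (ior F B), and when C == 0 it is just F.  */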
 
  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}
 
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  */
 
static rtx
simplify_logical (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* We can call simplify_and_const_int only if we don't lose
         any (sign) bits when converting INTVAL (op1) to
         "unsigned HOST_WIDE_INT".  */
      if (CONST_INT_P (op1)
          && (HWI_COMPUTABLE_MODE_P (mode)
              || INTVAL (op1) > 0))
        {
          x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
          if (GET_CODE (x) != AND)
            return x;

          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
        }

      /* If we have any of (and (ior A B) C) or (and (xor A B) C),
         apply the distributive law and then the inverse distributive
         law to see if things simplify.  */
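      /* For example (an illustration, not from the original sources):
         (and (ior A B) A) distributes to (ior (and A A) (and B A)), which
         the inverse step may then collapse to just A.  */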
      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }
      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    case IOR:
      /* If we have (ior (and A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      if (GET_CODE (op1) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    default:
      gcc_unreachable ();
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
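
/* For example (an illustration, not from the original sources): on a 32-bit
   target, expand_compound_operation turns (zero_extend:SI (reg:QI R)) into
   shift rtxes that typically simplify to an AND with (const_int 255), while
   make_compound_operation performs the reverse rewriting.  */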
 
static rtx
expand_compound_operation (rtx x)
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
         it depends on implicitly extending the value.
         Since we don't know the right way to extend it,
         we can't tell whether the implicit way is right.

         Even for a mode that is no wider than a const_int,
         we can't win, because we need to sign extend one of its bits through
         the rest of it, and we don't know which bit.  */
      if (CONST_INT_P (XEXP (x, 0)))
        return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
         (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
         because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
         reloaded.  If not for that, MEM's would very rarely be safe.

         Reject MODEs bigger than a word, because we might not be able
         to reference a two-register group starting with an arbitrary register
         (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
         is if it is an ASM_OPERANDS), we can't do anything since we don't
         know how much masking to do.  */
      if (len == 0)
        return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;

      /* ... fall through ...  */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        return XEXP (x, 0);

      if (!CONST_INT_P (XEXP (x, 1))
          || !CONST_INT_P (XEXP (x, 2))
          || GET_MODE (XEXP (x, 0)) == VOIDmode)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* This should stay within the object being extracted, fail otherwise.  */
      if (len + pos > GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))))
        return x;

      if (BITS_BIG_ENDIAN)
        pos = GET_MODE_PRECISION (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to the cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (HWI_COMPUTABLE_MODE_P (GET_MODE (x))
          && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                & ~(((unsigned HOST_WIDE_INT)
                      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
                     >> 1))
               == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (set_src_cost (x, optimize_this_for_speed_p)
          > set_src_cost (temp2, optimize_this_for_speed_p))
        return temp2;
      else if (set_src_cost (x, optimize_this_for_speed_p)
               > set_src_cost (temp, optimize_this_for_speed_p))
        return temp;
      else
        return x;
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
         know that the last value didn't have any inappropriate bits
         set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && HWI_COMPUTABLE_MODE_P (GET_MODE (x))
          && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && HWI_COMPUTABLE_MODE_P (GET_MODE (x))
          && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
         is a comparison and STORE_FLAG_VALUE permits.  This is like
         the first case, but it works even when GET_MODE (x) is larger
         than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && COMPARISON_P (XEXP (XEXP (x, 0), 0))
          && (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
          && (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && (STORE_FLAG_VALUE & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in
     to produce such a position.  */
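
  /* For instance (an illustration, not from the original sources): on a
     32-bit target with POS == 8 and LEN == 8, (sign_extract:SI X 8 8)
     expands to (ashiftrt:SI (ashift:SI X (const_int 16)) (const_int 24)),
     i.e. 32 - 8 - 8 bits left, then 32 - 8 bits right.  */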
 
  modewidth = GET_MODE_PRECISION (GET_MODE (x));
  if (modewidth >= pos + len)
    {
      enum machine_mode mode = GET_MODE (x);
      tem = gen_lowpart (mode, XEXP (x, 0));
      if (!tem || GET_CODE (tem) == CLOBBER)
        return x;
      tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                  tem, modewidth - pos - len);
      tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
                                  mode, tem, modewidth - len);
    }
  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
                                  simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                        GET_MODE (x),
                                                        XEXP (x, 0), pos),
                                  ((unsigned HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}

/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */
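
/* For example (an illustration, not from the original sources): with
   BITS_BIG_ENDIAN == 0, (set (zero_extract:SI R (const_int 8) (const_int 4))
   S) is rewritten below as R = (R & ~(255 << 4)) | ((S & 255) << 4).  */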
 
static const_rtx
expand_field_assignment (const_rtx x)
{
  rtx inner;
  rtx pos;                      /* Always counts from low bit.  */
  int len;
  rtx mask, cleared, masked;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
          && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
        {
          inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
          len = GET_MODE_PRECISION (GET_MODE (XEXP (SET_DEST (x), 0)));
          pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
        }
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
               && CONST_INT_P (XEXP (SET_DEST (x), 1)))
        {
          inner = XEXP (SET_DEST (x), 0);
          len = INTVAL (XEXP (SET_DEST (x), 1));
          pos = XEXP (SET_DEST (x), 2);

          /* A constant position should stay within the width of INNER.  */
          if (CONST_INT_P (pos)
              && INTVAL (pos) + len > GET_MODE_PRECISION (GET_MODE (inner)))
            break;

          if (BITS_BIG_ENDIAN)
            {
              if (CONST_INT_P (pos))
                pos = GEN_INT (GET_MODE_PRECISION (GET_MODE (inner)) - len
                               - INTVAL (pos));
              else if (GET_CODE (pos) == MINUS
                       && CONST_INT_P (XEXP (pos, 1))
                       && (INTVAL (XEXP (pos, 1))
                           == GET_MODE_PRECISION (GET_MODE (inner)) - len))
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
              else
                pos = simplify_gen_binary (MINUS, GET_MODE (pos),
                                           GEN_INT (GET_MODE_PRECISION (
                                                    GET_MODE (inner))
                                                    - len),
                                           pos);
            }
        }

      /* A SUBREG between two modes that occupy the same numbers of words
         can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
               /* We need SUBREGs to compute nonzero_bits properly.  */
               && nonzero_sign_valid
               && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                        + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
        {
          x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
                           gen_lowpart
                           (GET_MODE (SUBREG_REG (SET_DEST (x))),
                            SET_SRC (x)));
          continue;
        }
      else
        break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
        inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non scalar integer modes.  */
      if (! SCALAR_INT_MODE_P (compute_mode))
        {
          enum machine_mode imode;

          /* Don't do anything for vector or complex integral types.  */
          if (! FLOAT_MODE_P (compute_mode))
            break;

          /* Try to find an integral mode to pun with.  */
          imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
          if (imode == BLKmode)
            break;

          compute_mode = imode;
          inner = gen_lowpart (imode, inner);
        }

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
        break;

      /* Now compute the equivalent expression.  Make a copy of INNER
         for the SET_DEST in case it is a MEM into which we will substitute;
         we don't want shared RTL in that case.  */
      mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << len) - 1);
      cleared = simplify_gen_binary (AND, compute_mode,
                                     simplify_gen_unary (NOT, compute_mode,
                                       simplify_gen_binary (ASHIFT,
                                                            compute_mode,
                                                            mask, pos),
                                       compute_mode),
                                     inner);
      masked = simplify_gen_binary (ASHIFT, compute_mode,
                                    simplify_gen_binary (
                                      AND, compute_mode,
                                      gen_lowpart (compute_mode, SET_SRC (x)),
                                      mask),
                                    pos);

      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
                       simplify_gen_binary (IOR, compute_mode,
                                            cleared, masked));
    }

  return x;
}

/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   UNSIGNEDP is nonzero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is nonzero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */
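
/* For example (an illustration, not from the original sources):
   make_extraction (SImode, R, 8, NULL_RTX, 8, 1, 0, 0) requests bits 8..15
   of R zero-extended into SImode and is typically returned as
   (zero_extract:SI R (const_int 8) (const_int 8)).  */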
 
static rtx
make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
                 int in_dest, int in_compare)
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  rtx new_rtx = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
         consider just the QI as the memory to extract from.
         The subreg adds or removes high bits; its mode is
         irrelevant to the meaning of this extraction,
         since POS and LEN count from the lsb.  */
      if (MEM_P (SUBREG_REG (inner)))
        is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }
  else if (GET_CODE (inner) == ASHIFT
           && CONST_INT_P (XEXP (inner, 1))
           && pos_rtx == 0 && pos == 0
           && len > UINTVAL (XEXP (inner, 1)))
    {
      /* We're extracting the least significant bits of an rtx
         (ashift X (const_int C)), where LEN > C.  Extract the
         least significant (LEN - C) bits of X, giving an rtx
         whose mode is MODE, then shift it left C times.  */
      new_rtx = make_extraction (mode, XEXP (inner, 0),
                             0, 0, len - INTVAL (XEXP (inner, 1)),
                             unsignedp, in_dest, in_compare);
      if (new_rtx != 0)
        return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && CONST_INT_P (pos_rtx))
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  */

  if (tmode != BLKmode
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
           && !MEM_P (inner)
           && (inner_mode == tmode
               || !REG_P (inner)
               || TRULY_NOOP_TRUNCATION_MODES_P (tmode, inner_mode)
               || reg_truncated_to_mode (tmode, inner))
           && (! in_dest
               || (REG_P (inner)
                   && have_insn_for (STRICT_LOW_PART, tmode))))
          || (MEM_P (inner) && pos_rtx == 0
              && (pos
                  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
                     : BITS_PER_UNIT)) == 0
              /* We can't do this if we are widening INNER_MODE (it
                 may not be aligned, for one thing).  */
              && GET_MODE_PRECISION (inner_mode) >= GET_MODE_PRECISION (tmode)
              && (inner_mode == tmode
                  || (! mode_dependent_address_p (XEXP (inner, 0))
                      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
         field.  If the original and current mode are the same, we need not
         adjust the offset.  Otherwise, we do if bytes big endian.

         If INNER is not a MEM, get a piece consisting of just the field
         of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (MEM_P (inner))
        {
          HOST_WIDE_INT offset;

          /* POS counts from lsb, but make OFFSET count in memory order.  */
          if (BYTES_BIG_ENDIAN)
            offset = (GET_MODE_PRECISION (is_mode) - len - pos) / BITS_PER_UNIT;
          else
            offset = pos / BITS_PER_UNIT;

          new_rtx = adjust_address_nv (inner, tmode, offset);
        }
      else if (REG_P (inner))
        {
          if (tmode != inner_mode)
            {
              /* We can't call gen_lowpart in a DEST since we
                 always want a SUBREG (see below) and it would sometimes
                 return a new hard register.  */
              if (pos || in_dest)
                {
                  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

                  if (WORDS_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
                    final_word = ((GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode))
                                  / UNITS_PER_WORD) - final_word;

                  final_word *= UNITS_PER_WORD;
                  if (BYTES_BIG_ENDIAN &&
                      GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
                    final_word += (GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

                  /* Avoid creating invalid subregs, for example when
                     simplifying (x>>32)&255.  */
                  if (!validate_subreg (tmode, inner_mode, inner, final_word))
                    return NULL_RTX;

                  new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
                }
              else
                new_rtx = gen_lowpart (tmode, inner);
            }
          else
            new_rtx = inner;
        }
      else
        new_rtx = force_to_mode (inner, tmode,
                             len >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                             0);

      /* If this extraction is going into the destination of a SET,
         make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
        return (MEM_P (new_rtx) ? new_rtx
                : (GET_CODE (new_rtx) != SUBREG
                   ? gen_rtx_CLOBBER (tmode, const0_rtx)
                   : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));

      if (mode == tmode)
        return new_rtx;

      if (CONST_INT_P (new_rtx)
          || GET_CODE (new_rtx) == CONST_DOUBLE)
        return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                                         mode, new_rtx, tmode);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         sign and zero extension, that are equivalent in these cases.  */
      if (flag_expensive_optimizations
          && (HWI_COMPUTABLE_MODE_P (tmode)
              && ((nonzero_bits (new_rtx, tmode)
                   & ~(((unsigned HOST_WIDE_INT)GET_MODE_MASK (tmode)) >> 1))
                  == 0)))
        {
          rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
          rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (set_src_cost (temp, optimize_this_for_speed_p)
              <= set_src_cost (temp1, optimize_this_for_speed_p))
            return temp;
          return temp1;
        }

      /* Otherwise, sign- or zero-extend unless we already are in the
         proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                             mode, new_rtx));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && unsignedp)
    return 0;
 
7324
  /* Unless INNER is not MEM, reject this if we would be spanning bytes or
7325
     if the position is not a constant and the length is not 1.  In all
7326
     other cases, we would only be going outside our object in cases when
7327
     an original shift would have been undefined.  */
7328
  if (MEM_P (inner)
7329
      && ((pos_rtx == 0 && pos + len > GET_MODE_PRECISION (is_mode))
7330
          || (pos_rtx != 0 && len != 1)))
7331
    return 0;
7332
 
  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is the preferred mode
     for an extraction pattern's first input operand, or word_mode if there
     is none.  */
  if (!MEM_P (inner))
    wanted_inner_mode = wanted_inner_reg_mode;
  else
    {
      /* Be careful not to go beyond the extracted object and maintain the
         natural alignment of the memory.  */
      wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
      while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
             > GET_MODE_BITSIZE (wanted_inner_mode))
        {
          wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
          gcc_assert (wanted_inner_mode != VOIDmode);
        }

      /* If we have to change the mode of memory and cannot, the desired mode
         is EXTRACTION_MODE.  */
      if (inner_mode != wanted_inner_mode
          && (mode_dependent_address_p (XEXP (inner, 0))
              || MEM_VOLATILE_P (inner)
              || pos_rtx))
        wanted_inner_mode = extraction_mode;
    }

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
         BITS_BIG_ENDIAN style.  If position is constant, compute new
         position.  Otherwise, build subtraction.
         Note that POS is relative to the mode of the original argument.
         If it's a MEM we need to recompute POS relative to that.
         However, if we're extracting from (or inserting into) a register,
         we want to recompute POS relative to wanted_inner_mode.  */
      int width = (MEM_P (inner)
                   ? GET_MODE_BITSIZE (is_mode)
                   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
        pos = width - len - pos;
      else
        pos_rtx
          = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
         Note that it can only be less than 0 if !MEM_P (inner).  */
    }
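
  /* For example (an illustration, not from the original sources): with
     BITS_BIG_ENDIAN, LEN == 8 and POS == 0 in a 32-bit register, the
     converted position is 32 - 8 - 0 == 24.  */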
 
  /* If INNER has a wider mode, and this is a constant extraction, try to
     make it smaller and adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && inner_mode != wanted_inner_mode
      && ! pos_rtx
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && MEM_P (inner)
      && ! mode_dependent_address_p (XEXP (inner, 0))
      && ! MEM_VOLATILE_P (inner))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
         endian in both bits and bytes or little endian in bits and bytes.
         If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
         adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* We can now move to the desired byte.  */
      offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
                * GET_MODE_SIZE (wanted_inner_mode);
      pos %= GET_MODE_BITSIZE (wanted_inner_mode);

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
          && is_mode != wanted_inner_mode)
        offset = (GET_MODE_SIZE (is_mode)
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, get it into the proper mode.  If we are changing
     its mode, POS must be a constant and smaller than the size of the new
     mode.  */
  else if (!MEM_P (inner))
    {
      /* On the LHS, don't create paradoxical subregs implicitly truncating
         the register unless TRULY_NOOP_TRUNCATION.  */
      if (in_dest
          && !TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (inner),
                                             wanted_inner_mode))
        return NULL_RTX;

      if (GET_MODE (inner) != wanted_inner_mode
          && (pos_rtx != 0
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
        return NULL_RTX;

      if (orig_pos < 0)
        return NULL_RTX;

      inner = force_to_mode (inner, wanted_inner_mode,
                             pos_rtx
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
                                << orig_pos),
                             0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert extraction to cheaper one - either
         SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
         cases.  */
      if (flag_expensive_optimizations
          && (HWI_COMPUTABLE_MODE_P (GET_MODE (pos_rtx))
              && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (GET_MODE (pos_rtx)))
                       >> 1))
                  == 0)))
        {
          rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (set_src_cost (temp1, optimize_this_for_speed_p)
              < set_src_cost (temp, optimize_this_for_speed_p))
            temp = temp1;
        }
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new_rtx = gen_lowpart (mode, new_rtx);

  return new_rtx;
}

/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
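
/* For example (an illustration, not from the original sources):
   extract_left_shift on (plus (ashift X (const_int 3)) (const_int 8)) with
   COUNT == 3 returns (plus X (const_int 1)), since shifting that result
   left by 3 bits recreates the original value.  */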
 
static rtx
extract_left_shift (rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (CONST_INT_P (XEXP (x, 1))
          && (UINTVAL (XEXP (x, 1))
              & ((((unsigned HOST_WIDE_INT) 1 << count)) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_binary (code, mode, tem,
                                    GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
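
/* For example (an illustration, not from the original sources): with
   IN_CODE == SET, (and:SI (lshiftrt:SI X (const_int 8)) (const_int 255))
   is rewritten below as (zero_extract:SI X (const_int 8) (const_int 8)),
   and inside a memory address (ashift:SI X (const_int 2)) becomes
   (mult:SI X (const_int 4)).  */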
 
static rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_PRECISION (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i, j;
  rtx new_rtx = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM ? MEM
               : ((code == PLUS || code == MINUS)
                  && SCALAR_INT_MODE_P (mode)) ? MEM
               : ((code == COMPARE || COMPARISON_P (x))
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
      if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && SCALAR_INT_MODE_P (mode))
        {
          HOST_WIDE_INT count = INTVAL (XEXP (x, 1));
          HOST_WIDE_INT multval = (HOST_WIDE_INT) 1 << count;

          new_rtx = make_compound_operation (XEXP (x, 0), next_code);
          if (GET_CODE (new_rtx) == NEG)
            {
              new_rtx = XEXP (new_rtx, 0);
              multval = -multval;
            }
          multval = trunc_int_for_mode (multval, mode);
          new_rtx = gen_rtx_MULT (mode, new_rtx, GEN_INT (multval));
        }
      break;

    case PLUS:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);
      lhs = make_compound_operation (lhs, next_code);
      rhs = make_compound_operation (rhs, next_code);
      if (GET_CODE (lhs) == MULT && GET_CODE (XEXP (lhs, 0)) == NEG
          && SCALAR_INT_MODE_P (mode))
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (lhs, 0), 0),
                                     XEXP (lhs, 1));
          new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
        }
      else if (GET_CODE (lhs) == MULT
               && (CONST_INT_P (XEXP (lhs, 1)) && INTVAL (XEXP (lhs, 1)) < 0))
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (lhs, 0),
                                     simplify_gen_unary (NEG, mode,
                                                         XEXP (lhs, 1),
                                                         mode));
          new_rtx = simplify_gen_binary (MINUS, mode, rhs, tem);
        }
      else
        {
          SUBST (XEXP (x, 0), lhs);
          SUBST (XEXP (x, 1), rhs);
          goto maybe_swap;
        }
      x = gen_lowpart (mode, new_rtx);
      goto maybe_swap;

    case MINUS:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);
      lhs = make_compound_operation (lhs, next_code);
      rhs = make_compound_operation (rhs, next_code);
      if (GET_CODE (rhs) == MULT && GET_CODE (XEXP (rhs, 0)) == NEG
          && SCALAR_INT_MODE_P (mode))
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (XEXP (rhs, 0), 0),
                                     XEXP (rhs, 1));
          new_rtx = simplify_gen_binary (PLUS, mode, tem, lhs);
        }
      else if (GET_CODE (rhs) == MULT
               && (CONST_INT_P (XEXP (rhs, 1)) && INTVAL (XEXP (rhs, 1)) < 0))
        {
          tem = simplify_gen_binary (MULT, mode, XEXP (rhs, 0),
                                     simplify_gen_unary (NEG, mode,
                                                         XEXP (rhs, 1),
                                                         mode));
          new_rtx = simplify_gen_binary (PLUS, mode, tem, lhs);
        }
      else
        {
          SUBST (XEXP (x, 0), lhs);
          SUBST (XEXP (x, 1), rhs);
          return x;
        }
      return gen_lowpart (mode, new_rtx);

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (!CONST_INT_P (XEXP (x, 1)))
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                             XEXP (x, 1)),
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                             XEXP (x, 1)));
          new_rtx = make_compound_operation (new_rtx, in_code);
        }
 
      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */
 
      else if (GET_CODE (XEXP (x, 0)) == ROTATE
               && CONST_INT_P (XEXP (XEXP (x, 0), 1))
               && (i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
        {
          new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new_rtx = make_extraction (mode, new_rtx,
                                 (GET_MODE_PRECISION (mode)
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
        }

      /* On machines without logical shifts, if the operand of the AND is
         a logical shift and our mask turns off all the propagated sign
         bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
               && !have_insn_for (LSHIFTRT, mode)
               && have_insn_for (ASHIFTRT, mode)
               && CONST_INT_P (XEXP (XEXP (x, 0), 1))
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
               && mode_width <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
            SUBST (XEXP (x, 0),
                   gen_rtx_ASHIFTRT (mode,
                                     make_compound_operation
                                     (XEXP (XEXP (x, 0), 0), next_code),
                                     XEXP (XEXP (x, 0), 1)));
        }

      /* If the constant is one less than a power of two, this might be
         representable by an extraction even if no shift is present.
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
         we are in a COMPARE.  */
      else if ((i = exact_log2 (UINTVAL (XEXP (x, 1)) + 1)) >= 0)
        new_rtx = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
         convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
               && (i = exact_log2 (UINTVAL (XEXP (x, 1)))) >= 0)
        new_rtx = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
          && ! have_insn_for (LSHIFTRT, mode)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
        {
          new_rtx = gen_rtx_ASHIFTRT (mode,
                                  make_compound_operation (XEXP (x, 0),
                                                           next_code),
                                  XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
      if (CONST_INT_P (rhs)
          && GET_CODE (lhs) == ASHIFT
          && CONST_INT_P (XEXP (lhs, 1))
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
          && INTVAL (XEXP (lhs, 1)) >= 0
          && INTVAL (rhs) < mode_width)
        {
          new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
          new_rtx = make_extraction (mode, new_rtx,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
          break;
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
          && ! (GET_CODE (lhs) == SUBREG
                && (OBJECT_P (SUBREG_REG (lhs))))
          && CONST_INT_P (rhs)
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && INTVAL (rhs) < mode_width
          && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */
      {
        rtx inner = SUBREG_REG (x), simplified;

        tem = make_compound_operation (inner, in_code);

        simplified
          = simplify_subreg (mode, tem, GET_MODE (inner), SUBREG_BYTE (x));
        if (simplified)
          tem = simplified;

        if (GET_CODE (tem) != GET_CODE (inner)
            && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (inner))
            && subreg_lowpart_p (x))
          {
            rtx newer
              = force_to_mode (tem, mode, ~(unsigned HOST_WIDE_INT) 0, 0);

            /* If we have something other than a SUBREG, we might have
               done an expansion, so rerun ourselves.  */
            if (GET_CODE (newer) != SUBREG)
              newer = make_compound_operation (newer, in_code);

            /* force_to_mode can expand compounds.  If it just re-expanded the
               compound, use gen_lowpart to convert to the desired mode.  */
            if (rtx_equal_p (newer, x)
                /* Likewise if it re-expanded the compound only partially.
                   This happens for SUBREG of ZERO_EXTRACT if they extract
                   the same number of bits.  */
                || (GET_CODE (newer) == SUBREG
                    && (GET_CODE (SUBREG_REG (newer)) == LSHIFTRT
                        || GET_CODE (SUBREG_REG (newer)) == ASHIFTRT)
                    && GET_CODE (inner) == AND
                    && rtx_equal_p (SUBREG_REG (newer), XEXP (inner, 0))))
              return gen_lowpart (GET_MODE (x), tem);

            return newer;
          }

        if (simplified)
          return tem;
      }
      break;

    default:
      break;
    }
 
7918
  if (new_rtx)
7919
    {
7920
      x = gen_lowpart (mode, new_rtx);
7921
      code = GET_CODE (x);
7922
    }
7923
 
7924
  /* Now recursively process each operand of this operation.  We need to
7925
     handle ZERO_EXTEND specially so that we don't lose track of the
7926
     inner mode.  */
7927
  if (GET_CODE (x) == ZERO_EXTEND)
7928
    {
7929
      new_rtx = make_compound_operation (XEXP (x, 0), next_code);
7930
      tem = simplify_const_unary_operation (ZERO_EXTEND, GET_MODE (x),
7931
                                            new_rtx, GET_MODE (XEXP (x, 0)));
7932
      if (tem)
7933
        return tem;
7934
      SUBST (XEXP (x, 0), new_rtx);
7935
      return x;
7936
    }
7937
 
7938
  fmt = GET_RTX_FORMAT (code);
7939
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
7940
    if (fmt[i] == 'e')
7941
      {
7942
        new_rtx = make_compound_operation (XEXP (x, i), next_code);
7943
        SUBST (XEXP (x, i), new_rtx);
7944
      }
7945
    else if (fmt[i] == 'E')
7946
      for (j = 0; j < XVECLEN (x, i); j++)
7947
        {
7948
          new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
7949
          SUBST (XVECEXP (x, i, j), new_rtx);
7950
        }
7951
 
7952
 maybe_swap:
7953
  /* If this is a commutative operation, the changes to the operands
7954
     may have made it noncanonical.  */
7955
  if (COMMUTATIVE_ARITH_P (x)
7956
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
7957
    {
7958
      tem = XEXP (x, 0);
7959
      SUBST (XEXP (x, 0), XEXP (x, 1));
7960
      SUBST (XEXP (x, 1), tem);
7961
    }
7962
 
7963
  return x;
7964
}
7965
 
7966
/* Given M see if it is a value that would select a field of bits
7967
   within an item, but not the entire word.  Return -1 if not.
7968
   Otherwise, return the starting position of the field, where 0 is the
7969
   low-order bit.
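
   For example, M == 0x78 (binary 1111000) selects a 4-bit field
   starting at position 3, while M == 0x05 has no single contiguous
   field and yields -1.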

   *PLEN is set to the length of the field.  */

static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = m ? ctz_hwi (m) : -1;
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}

/* If X refers to a register that equals REG in value, replace these
   references with REG.  */
static rtx
canon_reg_for_combine (rtx x, rtx reg)
{
  rtx op0, op1, op2;
  const char *fmt;
  int i;
  bool copied;

  enum rtx_code code = GET_CODE (x);
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      if (op0 != XEXP (x, 0))
        return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
                                   GET_MODE (reg));
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
                                        GET_MODE (op0), op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
        return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
                                     GET_MODE (op0), op0, op1, op2);
      break;

    case RTX_OBJ:
      if (REG_P (x))
        {
          if (rtx_equal_p (get_last_value (reg), x)
              || rtx_equal_p (reg, get_last_value (x)))
            return reg;
          else
            break;
        }

      /* fall through */

    default:
      fmt = GET_RTX_FORMAT (code);
      copied = false;
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (fmt[i] == 'e')
          {
            rtx op = canon_reg_for_combine (XEXP (x, i), reg);
            if (op != XEXP (x, i))
              {
                if (!copied)
                  {
                    copied = true;
                    x = copy_rtx (x);
                  }
                XEXP (x, i) = op;
              }
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              {
                rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
                if (op != XVECEXP (x, i, j))
                  {
                    if (!copied)
                      {
                        copied = true;
                        x = copy_rtx (x);
                      }
                    XVECEXP (x, i, j) = op;
                  }
              }
          }

      break;
    }

  return x;
}

/* Return X converted to MODE.  If the value is already truncated to
   MODE we can just return a subreg even though in the general case we
   would need an explicit truncation.  */

static rtx
gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x)
      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
      && !TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x))
      && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
    {
      /* Bit-cast X into an integer mode.  */
      if (!SCALAR_INT_MODE_P (GET_MODE (x)))
        x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x);
      x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode),
                              x, GET_MODE (x));
    }

  return gen_lowpart (mode, x);
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.
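
   For example, with MASK == 0x0f the expression (and X 0xff) can be
   simplified to X itself, since the AND only constrains bits that
   MASK already ignores.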

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */

static rtx
force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && have_insn_for (code, mode))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
  else
    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
                   - 1);
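
  /* For example, MASK == 0x30 gives FULLER_MASK == 0x3f: for arithmetic
     such as PLUS, carries out of bits 0-3 can propagate into bits 4-5,
     so those low bits must be treated as needed too.  */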

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
    x = const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (CONST_INT_P (x))
    {
      if (SCALAR_INT_MODE_P (mode))
        return gen_int_mode (INTVAL (x) & mask, mode);
      else
        {
          x = GEN_INT (INTVAL (x) & mask);
          return gen_lowpart_common (mode, x);
        }
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart (mode, x);

  /* We can ignore the effect of a SUBREG if it narrows the mode or
     if the constant masks to zero all the bits the mode doesn't have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && ((GET_MODE_SIZE (GET_MODE (x))
           < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          || (0 == (mask
                    & GET_MODE_MASK (GET_MODE (x))
                    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
    return force_to_mode (SUBREG_REG (x), mode, mask, next_select);

  /* The arithmetic simplifications here only work for scalar integer modes.  */
  if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return gen_lowpart_or_truncate (mode, x);

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, next_select);
      break;

    case TRUNCATE:
      /* Similarly for a truncate.  */
      return force_to_mode (XEXP (x, 0), mode, mask, next_select);

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
              && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
                  == mask))
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
            {
              unsigned HOST_WIDE_INT cval
                = UINTVAL (XEXP (x, 1))
                  | (GET_MODE_MASK (GET_MODE (x)) & ~mask);
              int width = GET_MODE_PRECISION (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (unsigned HOST_WIDE_INT) -1 << width;

              y = simplify_gen_binary (AND, GET_MODE (x),
                                       XEXP (x, 0), GEN_INT (cval));
              if (set_src_cost (y, optimize_this_for_speed_p)
                  < set_src_cost (x, optimize_this_for_speed_p))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */
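      /* For example, in (and (plus FOO 7) -4) with FOO known to be a
         multiple of 4, the 7 can be masked down to 4: the bits it would
         set below the alignment boundary are discarded by the AND.  */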

      {
        unsigned int width = GET_MODE_PRECISION (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (unsigned HOST_WIDE_INT) (-1) << width;

        if (CONST_INT_P (XEXP (x, 1))
            && exact_log2 (- smask) >= 0
            && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
            && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
          return force_to_mode (plus_constant (XEXP (x, 0),
                                               (INTVAL (XEXP (x, 1)) & smask)),
                                mode, smask, next_select);
      }

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
         in the mask, then we may replace with (neg Y).  */
      if (CONST_INT_P (XEXP (x, 0))
          && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
                                        & -INTVAL (XEXP (x, 0))))
              > mask))
        {
          x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
                                  GET_MODE (x));
          return force_to_mode (x, mode, mask, next_select);
        }

      /* Similarly, if C contains every bit in the fuller_mask, then we may
         replace with (not Y).  */
      if (CONST_INT_P (XEXP (x, 0))
          && ((UINTVAL (XEXP (x, 0)) | fuller_mask) == UINTVAL (XEXP (x, 0))))
        {
          x = simplify_gen_unary (NOT, GET_MODE (x),
                                  XEXP (x, 1), GET_MODE (x));
          return force_to_mode (x, mode, mask, next_select);
        }

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */
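      /* For instance, (ior (lshiftrt FOO 8) 3) is equivalent to
         (lshiftrt (ior FOO 0x300) 8), which exposes the inner IOR to the
         bitfield-extraction machinery.  */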

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && CONST_INT_P (XEXP (x, 1))
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_PRECISION (GET_MODE (x)))
          && (UINTVAL (XEXP (x, 1))
              & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
                                   XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, next_select);
        }

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
      op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);

      /* If we ended up truncating both operands, truncate the result of the
         operation instead.  */
      if (GET_CODE (op0) == TRUNCATE
          && GET_CODE (op1) == TRUNCATE)
        {
          op0 = XEXP (op0, 0);
          op1 = XEXP (op1, 0);
        }

      op0 = gen_lowpart_or_truncate (op_mode, op0);
      op1 = gen_lowpart_or_truncate (op_mode, op1);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (CONST_INT_P (XEXP (x, 1))
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_PRECISION (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
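      /* E.g. if only bits 0xf0 of (ashift X 4) are wanted, then only
         bits 0x0f of X itself are needed.  */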
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (op_mode)
          && HWI_COMPUTABLE_MODE_P (op_mode))
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && HWI_COMPUTABLE_MODE_P (op_mode))
        {
          rtx inner = XEXP (x, 0);
          unsigned HOST_WIDE_INT inner_mask;

          /* Select the mask of the bits we need for the shift operand.  */
          inner_mask = mask << INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and INNER_MASK is no wider than the
             width of X's mode.  */
          if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, inner_mask, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */
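      /* For example, if FOO is known to have at least 24 sign-bit copies
         in SImode, (and (lshiftrt FOO 28) 3) selects two of those copies
         and is equivalent to (lshiftrt FOO 30).  */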

      if (GET_CODE (x) == LSHIFTRT
          && CONST_INT_P (XEXP (x, 1))
          /* The shift puts one of the sign bit copies in the least significant
             bit.  */
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_PRECISION (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          /* Number of bits left after the shift must be more than the mask
             needs.  */
          && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
              <= GET_MODE_PRECISION (GET_MODE (x)))
          /* Must be more sign bit copies than the mask needs.  */
          && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                                 GEN_INT (GET_MODE_PRECISION (GET_MODE (x))
                                          - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (val_signbit_p (GET_MODE (x), mask))
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */

      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_PRECISION (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~(unsigned HOST_WIDE_INT) 0;

              /* GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_PRECISION (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_PRECISION (GET_MODE (x)) ;
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~nonzero) == 0)
            {
              x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
                                        XEXP (x, 0), INTVAL (XEXP (x, 1)));
              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, next_select);
            }

          else if ((i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                  (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                   GET_MODE_PRECISION (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, next_select);
            }
        }

      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                 XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_PRECISION (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && CONST_INT_P (temp))
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_PRECISION (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
                               GET_MODE (x));
          temp = simplify_gen_binary (XOR, GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                   temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, next_select);
        }

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE.  */
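      /* E.g. with STORE_FLAG_VALUE == 1, if FOO can only be 0 or 1 then
         (ne FOO 0) computes exactly FOO and the test can be dropped.  */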
      if ((mask & ~STORE_FLAG_VALUE) == 0
          && XEXP (x, 1) == const0_rtx
          && GET_MODE (XEXP (x, 0)) == mode
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && (nonzero_bits (XEXP (x, 0), mode)
              == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart_or_truncate (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_or_truncate (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_or_truncate (mode, x);
}

/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */

static rtx
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && XEXP (x, 1) == const0_rtx)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (UNARY_P (x)
           && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
                                    GET_MODE (XEXP (x, 0)));
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (BINARY_P (x))
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          /* If if_then_else_cond returned zero, then true/false are the
             same rtl.  We must copy one of them to prevent invalid rtl
             sharing.  */
          if (cond0 == 0)
            true0 = copy_rtx (true0);
          else if (cond1 == 0)
            true1 = copy_rtx (true1);

          if (COMPARISON_P (x))
            {
              *ptrue = simplify_gen_relational (code, mode, VOIDmode,
                                                true0, true1);
              *pfalse = simplify_gen_relational (code, mode, VOIDmode,
                                                 false0, false1);
            }
          else
            {
              *ptrue = simplify_gen_binary (code, mode, true0, true1);
              *pfalse = simplify_gen_binary (code, mode, false0, false1);
            }

          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is nonzero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */
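      /* The canonical instance is
         (plus (mult (ne A 0) B) (mult (eq A 0) C)),
         which evaluates to B when A is nonzero and to C when A is zero.  */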

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = simplify_gen_binary (MULT, mode,
                                             (code == MINUS
                                              ? simplify_gen_unary (NEG, mode,
                                                                    op1, mode)
                                              : op1),
                                              const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a SUBREG, we can narrow both the true and false values
     of the inner expression, if there is a condition.  */
  else if (code == SUBREG
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      true0 = simplify_gen_subreg (mode, true0,
                                   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      false0 = simplify_gen_subreg (mode, false0,
                                    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      if (true0 && false0)
        {
          *ptrue = true0;
          *pfalse = false0;
          return cond0;
        }
    }

  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
     will be least confusing to the rest of the compiler.  */
  else if (mode == BImode)
    {
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
      return x;
    }

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (x == constm1_rtx || x == const0_rtx
           || (mode != VOIDmode
               && num_sign_bit_copies (x, mode) == GET_MODE_PRECISION (mode)))
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (HWI_COMPUTABLE_MODE_P (mode)
           && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}

/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */

static rtx
known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  const char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  /* If either operand of the condition is a floating point value,
     then we have to avoid collapsing an EQ comparison.  */
  if (cond == EQ
      && rtx_equal_p (x, reg)
      && ! FLOAT_MODE_P (GET_MODE (x))
      && ! FLOAT_MODE_P (GET_MODE (val)))
    return val;

  if (cond == UNEQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
        return XEXP (x, 0);
      case LT:  case LE:
        return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
                                   XEXP (x, 0),
                                   GET_MODE (XEXP (x, 0)));
      default:
        break;
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
    {
      if (rtx_equal_p (XEXP (x, 0), val))
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
        {
          if (COMPARISON_P (x))
            {
              if (comparison_dominates_p (cond, code))
                return const_true_rtx;

              code = reversed_comparison_code (x, NULL);
              if (code != UNKNOWN
                  && comparison_dominates_p (cond, code))
                return const0_rtx;
              else
                return x;
            }
          else if (code == SMAX || code == SMIN
                   || code == UMIN || code == UMAX)
            {
              int unsignedp = (code == UMIN || code == UMAX);

              /* Do not reverse the condition when it is NE or EQ.
                 This is because we cannot conclude anything about
                 the value of 'SMAX (x, y)' when x is not equal to y,
                 but we can when x equals y.  */
              if ((code == SMAX || code == UMAX)
                  && ! (cond == EQ || cond == NE))
                cond = reverse_condition (cond);

              switch (cond)
                {
                case GE:   case GT:
                  return unsignedp ? x : XEXP (x, 1);
                case LE:   case LT:
                  return unsignedp ? x : XEXP (x, 0);
                case GEU:  case GTU:
                  return unsignedp ? XEXP (x, 1) : x;
                case LEU:  case LTU:
                  return unsignedp ? XEXP (x, 0) : x;
                default:
                  break;
                }
            }
        }
    }
  else if (code == SUBREG)
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);

      if (SUBREG_REG (x) != r)
        {
          /* We must simplify subreg here, before we lose track of the
             original inner_mode.  */
          new_rtx = simplify_subreg (GET_MODE (x), r,
                                 inner_mode, SUBREG_BYTE (x));
          if (new_rtx)
            return new_rtx;
          else
            SUBST (SUBREG_REG (x), r);
        }

      return x;
    }
  /* We don't have to handle SIGN_EXTEND here, because even in the
     case of replacing something with a modeless CONST_INT, a
     CONST_INT is already (supposed to be) a valid sign extension for
     its narrower mode, which implies it's already properly
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
     story is different.  */
  else if (code == ZERO_EXTEND)
    {
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
      rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);

      if (XEXP (x, 0) != r)
        {
          /* We must simplify the zero_extend here, before we lose
             track of the original inner_mode.  */
          new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                          r, inner_mode);
          if (new_rtx)
            return new_rtx;
          else
            SUBST (XEXP (x, 0), r);
        }

      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
                                                cond, reg, val));
    }

  return x;
}

/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (rtx x, rtx y)
{
  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (MEM_P (x) && GET_CODE (y) == SUBREG
      && MEM_P (SUBREG_REG (y))
      && rtx_equal_p (SUBREG_REG (y),
                      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (MEM_P (y) && GET_CODE (x) == SUBREG
      && MEM_P (SUBREG_REG (x))
      && rtx_equal_p (SUBREG_REG (x),
                      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  /* We used to see if get_last_value of X and Y were the same but that's
     not correct.  In one direction, we'll cause the assignment to have
     the wrong destination, and in the other, we'll import a register into
     this insn that might already have been dead.  So fail if none of the
     above cases are true.  */
  return 0;
}

/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (rtx x)
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  HOST_WIDE_INT pos;
  unsigned HOST_WIDE_INT len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */
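  /* For instance, (set A (and (rotate (const_int -2) POS) A)) clears
     bit POS of A and becomes (set (zero_extract A 1 POS) (const_int 0)).  */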

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && CONST_INT_P (XEXP (XEXP (src, 0), 0))
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
      && subreg_lowpart_p (XEXP (src, 0))
      && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
      && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
      && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
      && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
      && XEXP (XEXP (src, 0), 0) == const1_rtx
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }

  /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
     SRC is an AND with all bits of that field set, then we can discard
     the AND.  */
  if (GET_CODE (dest) == ZERO_EXTRACT
      && CONST_INT_P (XEXP (dest, 1))
      && GET_CODE (src) == AND
      && CONST_INT_P (XEXP (src, 1)))
    {
      HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
      unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
      unsigned HOST_WIDE_INT ze_mask;

      if (width >= HOST_BITS_PER_WIDE_INT)
        ze_mask = -1;
      else
        ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;

      /* Complete overlap.  We can remove the source AND.  */
      if ((and_mask & ze_mask) == ze_mask)
        return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));

      /* Partial overlap.  We can reduce the source AND.  */
      if ((and_mask & ze_mask) != and_mask)
        {
          mode = GET_MODE (src);
          src = gen_rtx_AND (mode, XEXP (src, 0),
                             gen_int_mode (and_mask & ze_mask, mode));
          return gen_rtx_SET (VOIDmode, dest, src);
        }
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */
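  /* For example, (set A (ior (and A 0xffffff00) B)) with B known to fit
     in 8 bits becomes an assignment of B to just the low 8 bits of A,
     e.g. (set (zero_extract A 8 0) B) or a strict_low_part equivalent.  */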
9172
 
9173
  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
9174
    return x;
9175
 
9176
  rhs = expand_compound_operation (XEXP (src, 0));
9177
  lhs = expand_compound_operation (XEXP (src, 1));
9178
 
9179
  if (GET_CODE (rhs) == AND
9180
      && CONST_INT_P (XEXP (rhs, 1))
9181
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
9182
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
9183
  else if (GET_CODE (lhs) == AND
9184
           && CONST_INT_P (XEXP (lhs, 1))
9185
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
9186
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
9187
  else
9188
    return x;
9189
 
9190
  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
9191
  if (pos < 0 || pos + len > GET_MODE_PRECISION (GET_MODE (dest))
9192
      || GET_MODE_PRECISION (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
9193
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
9194
    return x;
9195
 
9196
  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
9197
  if (assign == 0)
9198
    return x;
9199
 
9200
  /* The mode to use for the source is the mode of the assignment, or of
9201
     what is inside a possible STRICT_LOW_PART.  */
9202
  mode = (GET_CODE (assign) == STRICT_LOW_PART
9203
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
9204
 
9205
  /* Shift OTHER right POS places and make it the source, restricting it
9206
     to the proper length and mode.  */
9207
 
9208
  src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
9209
                                                     GET_MODE (src),
9210
                                                     other, pos),
9211
                               dest);
9212
  src = force_to_mode (src, mode,
9213
                       GET_MODE_PRECISION (mode) >= HOST_BITS_PER_WIDE_INT
9214
                       ? ~(unsigned HOST_WIDE_INT) 0
9215
                       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
9216
                       0);
9217
 
9218
  /* If SRC is masked by an AND that does not make a difference in
9219
     the value being stored, strip it.  */
9220
  if (GET_CODE (assign) == ZERO_EXTRACT
9221
      && CONST_INT_P (XEXP (assign, 1))
9222
      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
9223
      && GET_CODE (src) == AND
9224
      && CONST_INT_P (XEXP (src, 1))
9225
      && UINTVAL (XEXP (src, 1))
9226
         == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1)
9227
    src = XEXP (src, 0);
9228
 
9229
  return gen_rtx_SET (VOIDmode, assign, src);
9230
}

/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */

static rtx
apply_distributive_law (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  enum rtx_code inner_code;
  rtx lhs, rhs, other;
  rtx tem;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0);
  rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (OBJECT_P (lhs) || OBJECT_P (rhs))
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS and MINUS.  */
      if (code == PLUS || code == MINUS)
        return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
        return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations,
         provided the inner modes and byte offsets are the same, this
         is an extraction of a low-order part, we don't convert an fp
         operation to int or vice versa, this is not a vector mode,
         and we would not be converting a single-word operation into a
         multi-word operation.  The latter test is not required, but
         it prevents generating unneeded multi-word operations.  Some
         of the previous tests are redundant given the latter test,
         but are retained because they are required for correctness.

         We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
          || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
          || ! subreg_lowpart_p (lhs)
          || (GET_MODE_CLASS (GET_MODE (lhs))
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
          || paradoxical_subreg_p (lhs)
          || VECTOR_MODE_P (GET_MODE (lhs))
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
          /* Result might need to be truncated.  Don't change mode if
             explicit truncation is needed.  */
          || !TRULY_NOOP_TRUNCATION_MODES_P (GET_MODE (x),
                                             GET_MODE (SUBREG_REG (lhs))))
        return x;

      tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
                                 SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (COMMUTATIVE_ARITH_P (lhs)
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (COMMUTATIVE_ARITH_P (lhs)
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (COMMUTATIVE_ARITH_P (lhs)
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return simplify_gen_binary (inner_code, GET_MODE (x),
                              apply_distributive_law (tem), other);
}
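
/* Editorial illustration for apply_distributive_law (hypothetical
   operands, not from the original sources): given

       (ior (ashift A C) (ashift B C))

   the common operand C is factored out and the result is
   (ashift (ior A B) C); for the XOR-of-IOR exception handled above,

       (xor (ior A C) (ior B C))

   becomes (and (xor A B) (not C)).  */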

/* See if X is of the form (* (+ A B) C), and if so convert to
   (+ (* A C) (* B C)) and try to simplify.

   Most of the time, this results in no change.  However, if some of
   the operands are the same or inverses of each other, simplifications
   will result.

   For example, (and (ior A B) (not B)) can occur as the result of
   expanding a bit field assignment.  When we apply the distributive
   law to this, we get (ior (and A (not B)) (and B (not B))),
   which then simplifies to (and A (not B)).

   Note that no checks happen on the validity of applying the inverse
   distributive law.  Such checks would be pointless here since the
   few places where this routine is called can perform them.

   N is the index of the term that is decomposed (the arithmetic
   operation, i.e. (+ A B) in the first example above).  !N is the index
   of the term that is distributed, i.e. of C in the first example
   above.  */
static rtx
distribute_and_simplify_rtx (rtx x, int n)
{
  enum machine_mode mode;
  enum rtx_code outer_code, inner_code;
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return NULL_RTX;

  decomposed = XEXP (x, n);
  if (!ARITHMETIC_P (decomposed))
    return NULL_RTX;

  mode = GET_MODE (x);
  outer_code = GET_CODE (x);
  distributed = XEXP (x, !n);

  inner_code = GET_CODE (decomposed);
  inner_op0 = XEXP (decomposed, 0);
  inner_op1 = XEXP (decomposed, 1);

  /* Special case (and (xor B C) (not A)), which is equivalent to
     (xor (ior A B) (ior A C))  */
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
    {
      distributed = XEXP (distributed, 0);
      outer_code = IOR;
    }

  if (n == 0)
    {
      /* Distribute the second term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
    }
  else
    {
      /* Distribute the first term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
    }

  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
                                                     new_op0, new_op1));
  if (GET_CODE (tmp) != outer_code
      && (set_src_cost (tmp, optimize_this_for_speed_p)
          < set_src_cost (x, optimize_this_for_speed_p)))
    return tmp;

  return NULL_RTX;
}
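
/* Editorial illustration for distribute_and_simplify_rtx (hypothetical
   operands): for X == (and (ior A B) (not B)) and N == 0, the IOR is
   decomposed and (not B) is distributed, producing

       (ior (and A (not B)) (and B (not B)))

   which simplify_gen_binary reduces toward (and A (not B)); the result
   is returned only if it is cheaper than X and its outer code differs
   from the original AND, otherwise NULL_RTX is returned.  */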

/* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.  Return an equivalent form, if different from (and VAROP
   (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */

static rtx
simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
                          unsigned HOST_WIDE_INT constop)
{
  unsigned HOST_WIDE_INT nonzero;
  unsigned HOST_WIDE_INT orig_constop;
  rtx orig_varop;
  int i;

  orig_varop = varop;
  orig_constop = constop;
  if (GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* Simplify VAROP knowing that we will only be looking at some of
     the bits in it.

     Note that by passing in CONSTOP, we guarantee that the bits not
     set in CONSTOP are not significant and will never be examined.
     We must ensure that is the case by explicitly masking out those
     bits before returning.  */
  varop = force_to_mode (varop, mode, constop, 0);

  /* If VAROP is a CLOBBER, we will fail so return it.  */
  if (GET_CODE (varop) == CLOBBER)
    return varop;

  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
     to VAROP and return the new constant.  */
  if (CONST_INT_P (varop))
    return gen_int_mode (INTVAL (varop) & constop, mode);

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart
        (mode,
         apply_distributive_law
         (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
                               simplify_and_const_int (NULL_RTX,
                                                       GET_MODE (varop),
                                                       XEXP (varop, 0),
                                                       constop),
                               simplify_and_const_int (NULL_RTX,
                                                       GET_MODE (varop),
                                                       XEXP (varop, 1),
                                                       constop))));

  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
     the AND and see if one of the operands simplifies to zero.  If so, we
     may eliminate it.  */

  if (GET_CODE (varop) == PLUS
      && exact_log2 (constop + 1) >= 0)
    {
      rtx o0, o1;

      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
      if (o0 == const0_rtx)
        return o1;
      if (o1 == const0_rtx)
        return o0;
    }

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    return varop;

  if (varop == orig_varop && constop == orig_constop)
    return NULL_RTX;

  /* Otherwise, return an AND.  */
  return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
}
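
/* Editorial illustration for simplify_and_const_int_1 (hypothetical
   values): in SImode, masking VAROP == (neg X) with CONSTOP == 8 when
   nonzero_bits (X, SImode) == 1 yields (ashift X (const_int 3)),
   because (neg X) is 0 or -1 and ANDing -1 with the power of two 8
   leaves exactly bit 3, which equals X shifted left 3 places.  */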


/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
                        unsigned HOST_WIDE_INT constop)
{
  rtx tem = simplify_and_const_int_1 (mode, varop, constop);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (AND, GET_MODE (varop), varop,
                             gen_int_mode (constop, mode));
  if (GET_MODE (x) != mode)
    x = gen_lowpart (mode, x);
  return x;
}

/* Given a REG, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   a shift, AND, or zero_extract, we can do better.  */

static rtx
reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
                              const_rtx known_x ATTRIBUTE_UNUSED,
                              enum machine_mode known_mode ATTRIBUTE_UNUSED,
                              unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
                              unsigned HOST_WIDE_INT *nonzero)
{
  rtx tem;
  reg_stat_type *rsp;

  /* If X is a register whose nonzero bits value is current, use it.
     Otherwise, if X is a register whose value we can find, use that
     value.  Otherwise, use the previously-computed global nonzero bits
     for this register.  */

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->last_set_value != 0
      && (rsp->last_set_mode == mode
          || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
              && GET_MODE_CLASS (mode) == MODE_INT))
      && ((rsp->last_set_label >= label_tick_ebb_start
           && rsp->last_set_label < label_tick)
          || (rsp->last_set_label == label_tick
              && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && !REGNO_REG_SET_P
                  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
    {
      *nonzero &= rsp->last_set_nonzero_bits;
      return NULL;
    }

  tem = get_last_value (x);

  if (tem)
    {
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is narrower than MODE and TEM is a non-negative
         constant that would appear negative in the mode of X,
         sign-extend it for use in reg_nonzero_bits because some
         machines (maybe most) will actually do the sign-extension
         and this is the conservative approach.

         ??? For 2.5, try to tighten up the MD files in this regard
         instead of this kludge.  */

      if (GET_MODE_PRECISION (GET_MODE (x)) < GET_MODE_PRECISION (mode)
          && CONST_INT_P (tem)
          && INTVAL (tem) > 0
          && val_signbit_known_set_p (GET_MODE (x), INTVAL (tem)))
        tem = GEN_INT (INTVAL (tem) | ~GET_MODE_MASK (GET_MODE (x)));
#endif
      return tem;
    }
  else if (nonzero_sign_valid && rsp->nonzero_bits)
    {
      unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;

      if (GET_MODE_PRECISION (GET_MODE (x)) < GET_MODE_PRECISION (mode))
        /* We don't know anything about the upper bits.  */
        mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
      *nonzero &= mask;
    }

  return NULL;
}
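
/* Editorial note on the widening above (hypothetical values): if X is
   a QImode register whose recorded nonzero_bits are 0x0f and MODE is
   SImode, nothing is known about the upper bits, so the mask becomes
   0x0f | (0xffffffff ^ 0xff) == 0xffffff0f, which is then ANDed into
   *NONZERO.  */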

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static rtx
reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
                                     const_rtx known_x ATTRIBUTE_UNUSED,
                                     enum machine_mode known_mode
                                     ATTRIBUTE_UNUSED,
                                     unsigned int known_ret ATTRIBUTE_UNUSED,
                                     unsigned int *result)
{
  rtx tem;
  reg_stat_type *rsp;

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->last_set_value != 0
      && rsp->last_set_mode == mode
      && ((rsp->last_set_label >= label_tick_ebb_start
           && rsp->last_set_label < label_tick)
          || (rsp->last_set_label == label_tick
              && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && !REGNO_REG_SET_P
                  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
    {
      *result = rsp->last_set_sign_bit_copies;
      return NULL;
    }

  tem = get_last_value (x);
  if (tem != 0)
    return tem;

  if (nonzero_sign_valid && rsp->sign_bit_copies != 0
      && GET_MODE_PRECISION (GET_MODE (x)) == GET_MODE_PRECISION (mode))
    *result = rsp->sign_bit_copies;

  return NULL;
}

/* Return the number of "extended" bits there are in X, when interpreted
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
   unsigned quantities, this is the number of high-order zero bits.
   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
   bits.  For example, if two quantities for which this function returns
   at least 1 are added, the addition is known not to overflow.

   This function will always return 0 unless called during combine, which
   implies that it must be called from a define_split.  */

unsigned int
extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
{
  if (nonzero_sign_valid == 0)
    return 0;

  return (unsignedp
          ? (HWI_COMPUTABLE_MODE_P (mode)
             ? (unsigned int) (GET_MODE_PRECISION (mode) - 1
                               - floor_log2 (nonzero_bits (x, mode)))
             : 0)
          : num_sign_bit_copies (x, mode) - 1);
}
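
/* Editorial illustration for extended_count (hypothetical values):
   with UNSIGNEDP set and 32-bit SImode, nonzero_bits (X, SImode)
   == 0xff gives floor_log2 == 7, so 31 - 7 == 24 "spare" bits are
   returned; adding two such quantities therefore cannot overflow
   SImode.  */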

/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only
   NEG, PLUS, IOR, XOR, and AND are supported.  We may set *POP0 to SET if
   the proper result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */

static int
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
                 enum rtx_code op1, HOST_WIDE_INT const1,
                 enum machine_mode mode, int *pcomp_p)
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == UNKNOWN || op0 == SET)
    return 1;

  else if (op0 == UNKNOWN)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      switch (op0)
        {
        case AND:
          const0 &= const1;
          break;
        case IOR:
          const0 |= const1;
          break;
        case XOR:
          const0 ^= const1;
          break;
        case PLUS:
          const0 += const1;
          break;
        case NEG:
          op0 = UNKNOWN;
          break;
        default:
          break;
        }
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    switch (op0)
      {
      case IOR:
        if (op1 == AND)
          /* (a & b) | b == b */
          op0 = SET;
        else /* op1 == XOR */
          /* (a ^ b) | b == a | b */
          {;}
        break;

      case XOR:
        if (op1 == AND)
          /* (a & b) ^ b == (~a) & b */
          op0 = AND, *pcomp_p = 1;
        else /* op1 == IOR */
          /* (a | b) ^ b == a & ~b */
          op0 = AND, const0 = ~const0;
        break;

      case AND:
        if (op1 == IOR)
          /* (a | b) & b == b */
          op0 = SET;
        else /* op1 == XOR */
          /* (a ^ b) & b == (~a) & b */
          *pcomp_p = 1;
        break;
      default:
        break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = UNKNOWN;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
           && op0 == AND)
    op0 = UNKNOWN;

  *pop0 = op0;

  /* ??? Slightly redundant with the above mask, but not entirely.
     Moving this above means we'd have to sign-extend the mode mask
     for the final test.  */
  if (op0 != UNKNOWN && op0 != NEG)
    *pconst0 = trunc_int_for_mode (const0, mode);

  return 1;
}
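
/* Editorial illustration for merge_outer_ops (hypothetical constants):
   merging an outermost IOR with *PCONST0 == 0x0f and a new OP1 == IOR
   with CONST1 == 0xf0 yields a single IOR with constant 0xff; merging
   IOR with AND over equal constants instead sets *POP0 to SET, per the
   identity (a & b) | b == b noted above.  */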

/* A helper to simplify_shift_const_1 to determine the mode we can perform
   the shift in.  The original shift operation CODE is performed on OP in
   ORIG_MODE.  Return the wider mode MODE if we can perform the operation
   in that mode.  Return ORIG_MODE otherwise.  We can also assume that the
   result of the shift is subject to operation OUTER_CODE with operand
   OUTER_CONST.  */

static enum machine_mode
try_widen_shift_mode (enum rtx_code code, rtx op, int count,
                      enum machine_mode orig_mode, enum machine_mode mode,
                      enum rtx_code outer_code, HOST_WIDE_INT outer_const)
{
  if (orig_mode == mode)
    return mode;
  gcc_assert (GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (orig_mode));

  /* In general we can't perform the shift in a wider mode for right
     shifts and rotates.  */
  switch (code)
    {
    case ASHIFTRT:
      /* We can still widen if the bits brought in from the left are identical
         to the sign bit of ORIG_MODE.  */
      if (num_sign_bit_copies (op, mode)
          > (unsigned) (GET_MODE_PRECISION (mode)
                        - GET_MODE_PRECISION (orig_mode)))
        return mode;
      return orig_mode;

    case LSHIFTRT:
      /* Similarly here but with zero bits.  */
      if (HWI_COMPUTABLE_MODE_P (mode)
          && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
        return mode;

      /* We can also widen if the bits brought in will be masked off.  This
         operation is performed in ORIG_MODE.  */
      if (outer_code == AND)
        {
          int care_bits = low_bitmask_len (orig_mode, outer_const);

          if (care_bits >= 0
              && GET_MODE_PRECISION (orig_mode) - care_bits >= count)
            return mode;
        }
      /* fall through */

    case ROTATE:
      return orig_mode;

    case ROTATERT:
      gcc_unreachable ();

    default:
      return mode;
    }
}
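
/* Editorial illustration for try_widen_shift_mode (hypothetical
   modes): an LSHIFTRT originally in QImode may be widened to SImode
   when nonzero_bits (OP, SImode) has no bits outside 0xff, so only
   zeros are shifted in, or when an outer (and ... (const_int 0x0f))
   discards the shifted-in bits and 8 - 4 >= COUNT holds.  */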

/* Simplify a shift of VAROP by ORIG_COUNT bits.  CODE says what kind
   of shift.  The result of the shift is RESULT_MODE.  Return NULL_RTX
   if we cannot simplify it.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
                        rtx varop, int orig_count)
{
  enum rtx_code orig_code = code;
  rtx orig_varop = varop;
  int count;
  enum machine_mode mode = result_mode;
  enum machine_mode shift_mode, tmode;
  unsigned int mode_words
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
  /* We form (outer_op (code varop count) (outer_const)).  */
  enum rtx_code outer_op = UNKNOWN;
  HOST_WIDE_INT outer_const = 0;
  int complement_p = 0;
  rtx new_rtx, x;

  /* Make sure to truncate the "natural" shift on the way in.  We don't
     want to do this inside the loop as it makes it more difficult to
     combine shifts.  */
  if (SHIFT_COUNT_TRUNCATED)
    orig_count &= GET_MODE_BITSIZE (mode) - 1;

  /* If we were given an invalid count, don't do anything except exactly
     what was requested.  */

  if (orig_count < 0 || orig_count >= (int) GET_MODE_PRECISION (mode))
    return NULL_RTX;

  count = orig_count;

  /* Unless one of the branches of the `if' in this loop does a `continue',
     we will `break' the loop after the `if'.  */

  while (count != 0)
    {
      /* If we have an operand of (clobber (const_int 0)), fail.  */
      if (GET_CODE (varop) == CLOBBER)
        return NULL_RTX;

      /* Convert ROTATERT to ROTATE.  */
      if (code == ROTATERT)
        {
          unsigned int bitsize = GET_MODE_PRECISION (result_mode);
          code = ROTATE;
          if (VECTOR_MODE_P (result_mode))
            count = bitsize / GET_MODE_NUNITS (result_mode) - count;
          else
            count = bitsize - count;
        }

      shift_mode = try_widen_shift_mode (code, varop, count, result_mode,
                                         mode, outer_op, outer_const);

      /* Handle cases where the count is greater than the size of the mode
         minus 1.  For ASHIFT, use the size minus one as the count (this can
         occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
         take the count modulo the size.  For other shifts, the result is
         zero.

         Since these shifts are being produced by the compiler by combining
         multiple operations, each of which is defined, we know what the
         result is supposed to be.  */

      if (count > (GET_MODE_PRECISION (shift_mode) - 1))
        {
          if (code == ASHIFTRT)
            count = GET_MODE_PRECISION (shift_mode) - 1;
          else if (code == ROTATE || code == ROTATERT)
            count %= GET_MODE_PRECISION (shift_mode);
          else
            {
              /* We can't simply return zero because there may be an
                 outer op.  */
              varop = const0_rtx;
              count = 0;
              break;
            }
        }

      /* If we discovered we had to complement VAROP, leave.  Making a NOT
         here would cause an infinite loop.  */
      if (complement_p)
        break;

      /* An arithmetic right shift of a quantity known to be -1 or 0
         is a no-op.  */
      if (code == ASHIFTRT
          && (num_sign_bit_copies (varop, shift_mode)
              == GET_MODE_PRECISION (shift_mode)))
        {
          count = 0;
          break;
        }

      /* If we are doing an arithmetic right shift and discarding all but
         the sign bit copies, this is equivalent to doing a shift by the
         bitsize minus one.  Convert it into that shift because it will often
         allow other simplifications.  */

      if (code == ASHIFTRT
          && (count + num_sign_bit_copies (varop, shift_mode)
              >= GET_MODE_PRECISION (shift_mode)))
        count = GET_MODE_PRECISION (shift_mode) - 1;

      /* We simplify the tests below and elsewhere by converting
         ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
         `make_compound_operation' will convert it to an ASHIFTRT for
         those machines (such as VAX) that don't have an LSHIFTRT.  */
      if (code == ASHIFTRT
          && val_signbit_known_clear_p (shift_mode,
                                        nonzero_bits (varop, shift_mode)))
        code = LSHIFTRT;

      if (((code == LSHIFTRT
            && HWI_COMPUTABLE_MODE_P (shift_mode)
            && !(nonzero_bits (varop, shift_mode) >> count))
           || (code == ASHIFT
               && HWI_COMPUTABLE_MODE_P (shift_mode)
               && !((nonzero_bits (varop, shift_mode) << count)
                    & GET_MODE_MASK (shift_mode))))
          && !side_effects_p (varop))
        varop = const0_rtx;

      switch (GET_CODE (varop))
        {
        case SIGN_EXTEND:
        case ZERO_EXTEND:
        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          new_rtx = expand_compound_operation (varop);
          if (new_rtx != varop)
            {
              varop = new_rtx;
              continue;
            }
          break;

        case MEM:
          /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
             minus the width of a smaller mode, we can do this with a
             SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
          if ((code == ASHIFTRT || code == LSHIFTRT)
              && ! mode_dependent_address_p (XEXP (varop, 0))
              && ! MEM_VOLATILE_P (varop)
              && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
                                         MODE_INT, 1)) != BLKmode)
            {
              new_rtx = adjust_address_nv (varop, tmode,
                                       BYTES_BIG_ENDIAN ? 0
                                       : count / BITS_PER_UNIT);

              varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
                                     : ZERO_EXTEND, mode, new_rtx);
              count = 0;
              continue;
            }
          break;

        case SUBREG:
          /* If VAROP is a SUBREG, strip it as long as the inner operand has
             the same number of words as what we've seen so far.  Then store
             the widest mode in MODE.  */
          if (subreg_lowpart_p (varop)
              && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
                  > GET_MODE_SIZE (GET_MODE (varop)))
              && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
                                  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                 == mode_words
              && GET_MODE_CLASS (GET_MODE (varop)) == MODE_INT
              && GET_MODE_CLASS (GET_MODE (SUBREG_REG (varop))) == MODE_INT)
            {
              varop = SUBREG_REG (varop);
              if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
                mode = GET_MODE (varop);
              continue;
            }
          break;

        case MULT:
          /* Some machines use MULT instead of ASHIFT because MULT
             is cheaper.  But it is still better on those machines to
             merge two shifts into one.  */
          if (CONST_INT_P (XEXP (varop, 1))
              && exact_log2 (UINTVAL (XEXP (varop, 1))) >= 0)
            {
              varop
                = simplify_gen_binary (ASHIFT, GET_MODE (varop),
                                       XEXP (varop, 0),
                                       GEN_INT (exact_log2 (
                                                UINTVAL (XEXP (varop, 1)))));
              continue;
            }
          break;

        case UDIV:
          /* Similar, for when divides are cheaper.  */
          if (CONST_INT_P (XEXP (varop, 1))
              && exact_log2 (UINTVAL (XEXP (varop, 1))) >= 0)
            {
              varop
                = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
                                       XEXP (varop, 0),
                                       GEN_INT (exact_log2 (
                                                UINTVAL (XEXP (varop, 1)))));
              continue;
            }
          break;
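
          /* Editorial illustration for the MULT and UDIV cases above
             (hypothetical operands): (mult X (const_int 8)) becomes
             (ashift X (const_int 3)) and (udiv X (const_int 4)) becomes
             (lshiftrt X (const_int 2)), so the loop can then merge the
             rewritten shift with the enclosing one.  */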

        case ASHIFTRT:
          /* If we are extracting just the sign bit of an arithmetic
             right shift, that shift is not needed.  However, the sign
             bit of a wider mode may be different from what would be
             interpreted as the sign bit in a narrower mode, so, if
             the result is narrower, don't discard the shift.  */
          if (code == LSHIFTRT
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
              && (GET_MODE_BITSIZE (result_mode)
                  >= GET_MODE_BITSIZE (GET_MODE (varop))))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* ... fall through ...  */

        case LSHIFTRT:
        case ASHIFT:
        case ROTATE:
          /* Here we have two nested shifts.  The result is usually the
             AND of a new shift with a mask.  We compute the result below.  */
          if (CONST_INT_P (XEXP (varop, 1))
              && INTVAL (XEXP (varop, 1)) >= 0
              && INTVAL (XEXP (varop, 1)) < GET_MODE_PRECISION (GET_MODE (varop))
              && HWI_COMPUTABLE_MODE_P (result_mode)
              && HWI_COMPUTABLE_MODE_P (mode)
              && !VECTOR_MODE_P (result_mode))
            {
              enum rtx_code first_code = GET_CODE (varop);
              unsigned int first_count = INTVAL (XEXP (varop, 1));
              unsigned HOST_WIDE_INT mask;
              rtx mask_rtx;

              /* We have one common special case.  We can't do any merging if
                 the inner code is an ASHIFTRT of a smaller mode.  However, if
                 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
                 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
                 we can convert it to
                 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
                 This simplifies certain SIGN_EXTEND operations.  */
              if (code == ASHIFT && first_code == ASHIFTRT
                  && count == (GET_MODE_PRECISION (result_mode)
                               - GET_MODE_PRECISION (GET_MODE (varop))))
                {
                  /* C3 has the low-order C1 bits zero.  */

                  mask = GET_MODE_MASK (mode)
                         & ~(((unsigned HOST_WIDE_INT) 1 << first_count) - 1);

                  varop = simplify_and_const_int (NULL_RTX, result_mode,
                                                  XEXP (varop, 0), mask);
                  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
                                                varop, count);
                  count = first_count;
                  code = ASHIFTRT;
                  continue;
                }

              /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
                 than C1 high-order bits equal to the sign bit, we can convert
                 this to either an ASHIFT or an ASHIFTRT depending on the
                 two counts.

                 We cannot do this if VAROP's mode is not SHIFT_MODE.  */

              if (code == ASHIFTRT && first_code == ASHIFT
                  && GET_MODE (varop) == shift_mode
                  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
                      > first_count))
                {
                  varop = XEXP (varop, 0);
                  count -= first_count;
                  if (count < 0)
                    {
                      count = -count;
                      code = ASHIFT;
                    }

                  continue;
                }

              /* There are some cases we can't do.  If CODE is ASHIFTRT,
                 we can only do this if FIRST_CODE is also ASHIFTRT.

                 We can't do the case when CODE is ROTATE and FIRST_CODE is
                 ASHIFTRT.

                 If the mode of this shift is not the mode of the outer shift,
                 we can't do this if either shift is a right shift or ROTATE.

                 Finally, we can't do any of these if the mode is too wide
                 unless the codes are the same.

                 Handle the case where the shift codes are the same
                 first.  */

              if (code == first_code)
                {
                  if (GET_MODE (varop) != result_mode
                      && (code == ASHIFTRT || code == LSHIFTRT
                          || code == ROTATE))
                    break;

                  count += first_count;
                  varop = XEXP (varop, 0);
                  continue;
                }

              if (code == ASHIFTRT
                  || (code == ROTATE && first_code == ASHIFTRT)
                  || GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT
                  || (GET_MODE (varop) != result_mode
                      && (first_code == ASHIFTRT || first_code == LSHIFTRT
                          || first_code == ROTATE
                          || code == ROTATE)))
                break;

              /* To compute the mask to apply after the shift, shift the
                 nonzero bits of the inner shift the same way the
                 outer shift will.  */

              mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));

              mask_rtx
                = simplify_const_binary_operation (code, result_mode, mask_rtx,
                                                   GEN_INT (count));

              /* Give up if we can't compute an outer operation to use.  */
              if (mask_rtx == 0
                  || !CONST_INT_P (mask_rtx)
                  || ! merge_outer_ops (&outer_op, &outer_const, AND,
                                        INTVAL (mask_rtx),
                                        result_mode, &complement_p))
                break;

              /* If the shifts are in the same direction, we add the
                 counts.  Otherwise, we subtract them.  */
              if ((code == ASHIFTRT || code == LSHIFTRT)
                  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
                count += first_count;
              else
                count -= first_count;

              /* If COUNT is positive, the new shift is usually CODE,
                 except for the two exceptions below, in which case it is
                 FIRST_CODE.  If the count is negative, FIRST_CODE should
                 always be used.  */
              if (count > 0
                  && ((first_code == ROTATE && code == ASHIFT)
                      || (first_code == ASHIFTRT && code == LSHIFTRT)))
                code = first_code;
              else if (count < 0)
                code = first_code, count = -count;

              varop = XEXP (varop, 0);
              continue;
            }

          /* If we have (A << B << C) for any shift, we can convert this to
             (A << C << B).  This wins if A is a constant.  Only try this if
             B is not a constant.  */

          else if (GET_CODE (varop) == code
                   && CONST_INT_P (XEXP (varop, 0))
                   && !CONST_INT_P (XEXP (varop, 1)))
            {
              rtx new_rtx = simplify_const_binary_operation (code, mode,
                                                         XEXP (varop, 0),
                                                         GEN_INT (count));
              varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
              count = 0;
              continue;
            }
          break;

        case NOT:
          if (VECTOR_MODE_P (mode))
            break;

          /* Make this fit the case below.  */
          varop = gen_rtx_XOR (mode, XEXP (varop, 0),
                               GEN_INT (GET_MODE_MASK (mode)));
          continue;

        case IOR:
        case AND:
        case XOR:
          /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have an (le X 0) operation.  If we have an arithmetic shift
             and STORE_FLAG_VALUE is 1 or we have a logical shift with
             STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */

          if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
              && XEXP (XEXP (varop, 0), 1) == constm1_rtx
              && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && (code == LSHIFTRT || code == ASHIFTRT)
              && count == (GET_MODE_PRECISION (GET_MODE (varop)) - 1)
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
              varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
                                  const0_rtx);

              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (GET_MODE (varop), varop);

              continue;
            }

          /* If we have (shift (logical)), move the logical to the outside
             to allow it to possibly combine with another logical and the
             shift to combine with another shift.  This also canonicalizes to
             what a ZERO_EXTRACT looks like.  Also, some machines have
             (and (shift)) insns.  */

          if (CONST_INT_P (XEXP (varop, 1))
              /* We can't do this if we have (ashiftrt (xor))  and the
                 constant has its sign bit set in shift_mode.  */
              && !(code == ASHIFTRT && GET_CODE (varop) == XOR
                   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
                                              shift_mode))
              && (new_rtx = simplify_const_binary_operation (code, result_mode,
                                                         XEXP (varop, 1),
                                                         GEN_INT (count))) != 0
              && CONST_INT_P (new_rtx)
              && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
                                  INTVAL (new_rtx), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* If we can't do that, try to simplify the shift in each arm of the
             logical expression, make a new logical expression, and apply
             the inverse distributive law.  This also can't be done
             for some (ashiftrt (xor)).  */
          if (CONST_INT_P (XEXP (varop, 1))
             && !(code == ASHIFTRT && GET_CODE (varop) == XOR
                  && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
                                             shift_mode)))
            {
              rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
                                              XEXP (varop, 0), count);
              rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
                                              XEXP (varop, 1), count);

              varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
                                           lhs, rhs);
              varop = apply_distributive_law (varop);

              count = 0;
              continue;
            }
          break;

        case EQ:
          /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
             says that the sign bit can be tested, FOO has mode MODE, C is
             GET_MODE_PRECISION (MODE) - 1, and FOO has only its low-order bit
             that may be nonzero.  */
          if (code == LSHIFTRT
              && XEXP (varop, 1) == const0_rtx
              && GET_MODE (XEXP (varop, 0)) == result_mode
              && count == (GET_MODE_PRECISION (result_mode) - 1)
              && HWI_COMPUTABLE_MODE_P (result_mode)
              && STORE_FLAG_VALUE == -1
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
              && merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode,
                                  &complement_p))
            {
              varop = XEXP (varop, 0);
              count = 0;
              continue;
            }
          break;

        case NEG:
          /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
             than the number of bits in the mode is equivalent to A.  */
          if (code == LSHIFTRT
              && count == (GET_MODE_PRECISION (result_mode) - 1)
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
            {
              varop = XEXP (varop, 0);
              count = 0;
              continue;
            }

          /* NEG commutes with ASHIFT since it is multiplication.  Move the
             NEG outside to allow shifts to combine.  */
          if (code == ASHIFT
              && merge_outer_ops (&outer_op, &outer_const, NEG, 0, result_mode,
                                  &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }
          break;

        case PLUS:
          /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
             is one less than the number of bits in the mode is
             equivalent to (xor A 1).  */
          if (code == LSHIFTRT
              && count == (GET_MODE_PRECISION (result_mode) - 1)
              && XEXP (varop, 1) == constm1_rtx
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
              && merge_outer_ops (&outer_op, &outer_const, XOR, 1, result_mode,
                                  &complement_p))
            {
              count = 0;
              varop = XEXP (varop, 0);
              continue;
            }

          /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
             that might be nonzero in BAR are those being shifted out and those
             bits are known zero in FOO, we can replace the PLUS with FOO.
             Similarly in the other operand order.  This code occurs when
             we are computing the size of a variable-size array.  */

          if ((code == ASHIFTRT || code == LSHIFTRT)
              && count < HOST_BITS_PER_WIDE_INT
              && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
              && (nonzero_bits (XEXP (varop, 1), result_mode)
                  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
            {
              varop = XEXP (varop, 0);
              continue;
            }
          else if ((code == ASHIFTRT || code == LSHIFTRT)
                   && count < HOST_BITS_PER_WIDE_INT
                   && HWI_COMPUTABLE_MODE_P (result_mode)
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
                            >> count)
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
                            & nonzero_bits (XEXP (varop, 1),
                                                 result_mode)))
            {
              varop = XEXP (varop, 1);
              continue;
            }

          /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
          if (code == ASHIFT
              && CONST_INT_P (XEXP (varop, 1))
              && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
                                                         XEXP (varop, 1),
                                                         GEN_INT (count))) != 0
              && CONST_INT_P (new_rtx)
              && merge_outer_ops (&outer_op, &outer_const, PLUS,
                                  INTVAL (new_rtx), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* Check for 'PLUS signbit', which is the canonical form of 'XOR
             signbit', and attempt to change the PLUS to an XOR and move it
             to the outer operation as is done above in the AND/IOR/XOR case
             leg for shift(logical).  See the logical handling above for the
             reasoning behind doing so.  */
          if (code == LSHIFTRT
              && CONST_INT_P (XEXP (varop, 1))
              && mode_signbit_p (result_mode, XEXP (varop, 1))
              && (new_rtx = simplify_const_binary_operation (code, result_mode,
                                                         XEXP (varop, 1),
                                                         GEN_INT (count))) != 0
              && CONST_INT_P (new_rtx)
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  INTVAL (new_rtx), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          break;

        case MINUS:
          /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have a (gt X 0) operation.  If the shift is arithmetic with
             STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
             we have a (neg (gt X 0)) operation.  */
10505
 
10506
          if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
10507
              && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
10508
              && count == (GET_MODE_PRECISION (GET_MODE (varop)) - 1)
10509
              && (code == LSHIFTRT || code == ASHIFTRT)
10510
              && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
10511
              && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
10512
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
10513
            {
10514
              count = 0;
10515
              varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
10516
                                  const0_rtx);
10517
 
10518
              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
10519
                varop = gen_rtx_NEG (GET_MODE (varop), varop);
10520
 
10521
              continue;
10522
            }
          break;

        case TRUNCATE:
          /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
             if the truncate does not affect the value.  */
          if (code == LSHIFTRT
              && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
              && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
              && (INTVAL (XEXP (XEXP (varop, 0), 1))
                  >= (GET_MODE_PRECISION (GET_MODE (XEXP (varop, 0)))
                      - GET_MODE_PRECISION (GET_MODE (varop)))))
            {
              rtx varop_inner = XEXP (varop, 0);

              varop_inner
                = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
                                    XEXP (varop_inner, 0),
                                    GEN_INT
                                    (count + INTVAL (XEXP (varop_inner, 1))));
              varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
              count = 0;
              continue;
            }
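
          /* For example, with a DImode source and an SImode result,
               (lshiftrt:SI (truncate:SI (lshiftrt:DI X 32)) 3)
             becomes (truncate:SI (lshiftrt:DI X 35)): the bits dropped
             by the truncation were already shifted out.  */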
          break;

        default:
          break;
        }

      break;
    }

  shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode,
                                     outer_op, outer_const);

  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.  */

  if (outer_op == UNKNOWN
      && orig_code == code && orig_count == count
      && varop == orig_varop
      && shift_mode == GET_MODE (varop))
    return NULL_RTX;

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (shift_mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != UNKNOWN)
    x = simplify_shift_const_1 (code, shift_mode, varop, count);
  else
    x = NULL_RTX;

  if (x == NULL_RTX)
    x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));

  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
                                GET_MODE_MASK (result_mode) >> orig_count);
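
  /* For example, if a QImode (lshiftrt X 3) was widened to SImode, the
     SImode shift is followed by an AND with GET_MODE_MASK (QImode) >> 3,
     i.e. 0x1f, clearing the bits the QImode shift would have cleared.  */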

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart_or_truncate (result_mode, x);

  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);

  if (outer_op != UNKNOWN)
    {
      if (GET_RTX_CLASS (outer_op) != RTX_UNARY
          && GET_MODE_PRECISION (result_mode) < HOST_BITS_PER_WIDE_INT)
        outer_const = trunc_int_for_mode (outer_const, result_mode);

      if (outer_op == AND)
        x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
        {
          /* This means that we have determined that the result is
             equivalent to a constant.  This should be rare.  */
          if (!side_effects_p (x))
            x = GEN_INT (outer_const);
        }
      else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
        x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
      else
        x = simplify_gen_binary (outer_op, result_mode, x,
                                 GEN_INT (outer_const));
    }

  return x;
}

/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  If we cannot simplify it,
   return X or, if it is NULL, synthesize the expression with
   simplify_gen_binary.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
                      rtx varop, int count)
{
  rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
  if (GET_MODE (x) != result_mode)
    x = gen_lowpart (result_mode, x);
  return x;
}


/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
{
  rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes, old_pat;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
          && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
        return -1;

  old_pat = PATTERN (insn);
  old_notes = REG_NOTES (insn);
  PATTERN (insn) = pat;
  REG_NOTES (insn) = 0;

  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn_code_number < 0)
        fputs ("Failed to match this instruction:\n", dump_file);
      else
        fputs ("Successfully matched this instruction:\n", dump_file);
      print_rtl_single (dump_file, pat);
    }

  /* If the pattern was not recognized, there is the possibility that we
     previously had an insn that clobbered some register as a side effect,
     but the combined insn doesn't need to do that.  So try once more
     without the clobbers unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
          {
            if (i != pos)
              SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
            pos++;
          }

      SUBST_INT (XVECLEN (pat, 0), pos);

      if (pos == 1)
        pat = XVECEXP (pat, 0, 0);

      PATTERN (insn) = pat;
      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (insn_code_number < 0)
            fputs ("Failed to match this instruction:\n", dump_file);
          else
            fputs ("Successfully matched this instruction:\n", dump_file);
          print_rtl_single (dump_file, pat);
        }
    }
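
  /* For example, a PARALLEL [(set A B) (clobber C) (clobber D)] that
     failed to match is retried as the bare (set A B); any clobbers the
     matched pattern really needs are added back below via add_clobbers.  */
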
  PATTERN (insn) = old_pat;
  REG_NOTES (insn) = old_notes;

  /* Recognize all no-op sets; these will be killed by a follow-up pass.  */
  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
                                     rtvec_alloc (GET_CODE (pat) == PARALLEL
                                                  ? (XVECLEN (pat, 0)
                                                     + num_clobbers_to_add)
                                                  : num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
        for (i = 0; i < XVECLEN (pat, 0); i++)
          XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
        XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
           i < XVECLEN (newpat, 0); i++)
        {
          if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
              && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
            return -1;
          if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
            {
              gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
              notes = alloc_reg_note (REG_UNUSED,
                                      XEXP (XVECEXP (newpat, 0, i), 0), notes);
            }
        }
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}

/* Like gen_lowpart_general but for use by combine.  In combine it
   is not possible to create any new pseudoregs.  However, it is
   safe to create invalid memory addresses, because combine will
   try to recognize them and all they will do is make the combine
   attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

static rtx
gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
  enum machine_mode imode = GET_MODE (x);
  unsigned int osize = GET_MODE_SIZE (omode);
  unsigned int isize = GET_MODE_SIZE (imode);
  rtx result;

  if (omode == imode)
    return x;

  /* Return identity if this is a CONST or symbolic reference.  */
  if (omode == Pmode
      && (GET_CODE (x) == CONST
          || GET_CODE (x) == SYMBOL_REF
          || GET_CODE (x) == LABEL_REF))
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */
  if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
      && ! ((imode == VOIDmode
             && (CONST_INT_P (x)
                 || GET_CODE (x) == CONST_DOUBLE))
            || isize == osize))
    goto fail;

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    {
      x = SUBREG_REG (x);

      /* In case we fall through to the address adjustments further
         below, update the known mode and size of x (imode and isize),
         since we just changed x.  */
      imode = GET_MODE (x);

      if (imode == omode)
        return x;

      isize = GET_MODE_SIZE (imode);
    }

  result = gen_lowpart_common (omode, x);

  if (result)
    return result;

  if (MEM_P (x))
    {
      int offset = 0;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
         address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
        goto fail;

      /* If we want to refer to something bigger than the original memref,
         generate a paradoxical subreg instead.  That will force a reload
         of the original memref X.  */
      if (isize < osize)
        return gen_rtx_SUBREG (omode, x, 0);

      if (WORDS_BIG_ENDIAN)
        offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);

      /* Adjust the address so that the address-after-the-data is
         unchanged.  */
      if (BYTES_BIG_ENDIAN)
        offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);

      return adjust_address_nv (x, omode, offset);
    }
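
  /* For example, taking the SImode lowpart of a DImode memref on a
     32-bit big-endian target (UNITS_PER_WORD == 4): the word offset is
     MAX (8, 4) - MAX (4, 4) == 4 and the byte adjustment is
     MIN (4, 4) - MIN (4, 8) == 0, so the low word lives 4 bytes in.  */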

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (COMPARISON_P (x))
    return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int offset = 0;
      rtx res;

      offset = subreg_lowpart_offset (omode, imode);
      if (imode == VOIDmode)
        {
          imode = int_mode_for_mode (omode);
          x = gen_lowpart_common (imode, x);
          if (x == NULL)
            goto fail;
        }
      res = simplify_gen_subreg (omode, x, imode, offset);
      if (res)
        return res;
    }

 fail:
  return gen_rtx_CLOBBER (omode, const0_rtx);
}

/* Try to simplify a comparison between OP0 and a constant OP1,
   where CODE is the comparison code that will be tested, into a
   (CODE OP0 const0_rtx) form.

   The result is a possibly different comparison code to use.
   *POP1 may be updated.  */

static enum rtx_code
simplify_compare_const (enum rtx_code code, rtx op0, rtx *pop1)
{
  enum machine_mode mode = GET_MODE (op0);
  unsigned int mode_width = GET_MODE_PRECISION (mode);
  HOST_WIDE_INT const_op = INTVAL (*pop1);

  /* Get the constant we are comparing against and turn off all bits
     not on in our mode.  */
  if (mode != VOIDmode)
    const_op = trunc_int_for_mode (const_op, mode);

  /* If we are comparing against a constant power of two and the value
     being compared can only have that single bit nonzero (e.g., it was
     `and'ed with that bit), we can replace this with a comparison
     with zero.  */
  if (const_op
      && (code == EQ || code == NE || code == GE || code == GEU
          || code == LT || code == LTU)
      && mode_width <= HOST_BITS_PER_WIDE_INT
      && exact_log2 (const_op) >= 0
      && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
    {
      code = (code == EQ || code == GE || code == GEU ? NE : EQ);
      const_op = 0;
    }
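
  /* For example, if OP0 is (and X 8), nonzero_bits is 8, so
     (eq (and X 8) 8) becomes (ne (and X 8) 0) and
     (ne (and X 8) 8) becomes (eq (and X 8) 0).  */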

  /* Similarly, if we are comparing a value known to be either -1 or
     0 with -1, we can convert this to a comparison with zero.  */
  if (const_op == -1
      && (code == EQ || code == NE || code == GT || code == LE
          || code == GEU || code == LTU)
      && num_sign_bit_copies (op0, mode) == mode_width)
    {
      code = (code == EQ || code == LE || code == GEU ? NE : EQ);
      const_op = 0;
    }
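
  /* For example, (eq (ashiftrt X 31) -1) in SImode becomes
     (ne (ashiftrt X 31) 0), since the shift result is known to be
     either -1 or 0.  */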

  /* Do some canonicalizations based on the comparison code.  We prefer
     comparisons against zero and then prefer equality comparisons.
     If we can reduce the size of a constant, we will do that too.  */
  switch (code)
    {
    case LT:
      /* < C is equivalent to <= (C - 1).  */
      if (const_op > 0)
        {
          const_op -= 1;
          code = LE;
          /* ... fall through to LE case below.  */
        }
      else
        break;

    case LE:
      /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
      if (const_op < 0)
        {
          const_op += 1;
          code = LT;
        }

      /* If we are doing a <= 0 comparison on a value known to have
         a zero sign bit, we can replace this with == 0.  */
      else if (const_op == 0
               && mode_width <= HOST_BITS_PER_WIDE_INT
               && (nonzero_bits (op0, mode)
                   & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
               == 0)
        code = EQ;
      break;

    case GE:
      /* >= C is equivalent to > (C - 1).  */
      if (const_op > 0)
        {
          const_op -= 1;
          code = GT;
          /* ... fall through to GT below.  */
        }
      else
        break;

    case GT:
      /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
      if (const_op < 0)
        {
          const_op += 1;
          code = GE;
        }

      /* If we are doing a > 0 comparison on a value known to have
         a zero sign bit, we can replace this with != 0.  */
      else if (const_op == 0
               && mode_width <= HOST_BITS_PER_WIDE_INT
               && (nonzero_bits (op0, mode)
                   & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
               == 0)
        code = NE;
      break;
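
      /* For example, (lt X 1) has become (le X 0) and (ge X 1) has
         become (gt X 0), so comparisons against zero are preferred.  */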

    case LTU:
      /* (unsigned) < C is equivalent to <= (C - 1).  */
      if (const_op > 0)
        {
          const_op -= 1;
          code = LEU;
          /* ... fall through ...  */
        }
      /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
      else if (mode_width <= HOST_BITS_PER_WIDE_INT
               && (unsigned HOST_WIDE_INT) const_op
               == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))
        {
          const_op = 0;
          code = GE;
          break;
        }
      else
        break;

    case LEU:
      /* unsigned <= 0 is equivalent to == 0.  */
      if (const_op == 0)
        code = EQ;
      /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
      else if (mode_width <= HOST_BITS_PER_WIDE_INT
               && (unsigned HOST_WIDE_INT) const_op
               == ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
        {
          const_op = 0;
          code = GE;
        }
      break;

    case GEU:
      /* (unsigned) >= C is equivalent to > (C - 1).  */
      if (const_op > 1)
        {
          const_op -= 1;
          code = GTU;
          /* ... fall through ...  */
        }

      /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
      else if (mode_width <= HOST_BITS_PER_WIDE_INT
               && (unsigned HOST_WIDE_INT) const_op
               == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))
        {
          const_op = 0;
          code = LT;
          break;
        }
      else
        break;

    case GTU:
      /* unsigned > 0 is equivalent to != 0.  */
      if (const_op == 0)
        code = NE;
      /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
      else if (mode_width <= HOST_BITS_PER_WIDE_INT
               && (unsigned HOST_WIDE_INT) const_op
               == ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
        {
          const_op = 0;
          code = LT;
        }
      break;
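
      /* For example, in SImode, (ltu X 0x80000000) tests exactly that
         the sign bit is clear, so it becomes (ge X 0); likewise
         (geu X 0x80000000) becomes (lt X 0).  */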

    default:
      break;
    }

  *pop1 = GEN_INT (const_op);
  return code;
}

/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;

  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
         so check specially.  */
      if (code != GTU && code != GEU && code != LTU && code != LEU
          && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
          && GET_CODE (XEXP (op0, 0)) == ASHIFT
          && GET_CODE (XEXP (op1, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
          && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
          && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
              == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
          && CONST_INT_P (XEXP (op0, 1))
          && XEXP (op0, 1) == XEXP (op1, 1)
          && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
          && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
          && (INTVAL (XEXP (op0, 1))
              == (GET_MODE_PRECISION (GET_MODE (op0))
                  - (GET_MODE_PRECISION
                     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
        {
          op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
          op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
        }
#endif

      /* If both operands are the same constant shift, see if we can ignore the
         shift.  We can if the shift is a rotate or if the bits shifted out of
         this shift are known to be zero for both inputs and if the type of
         comparison is compatible with the shift.  */
      if (GET_CODE (op0) == GET_CODE (op1)
          && HWI_COMPUTABLE_MODE_P (GET_MODE (op0))
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
              || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
                  && (code != GT && code != LT && code != GE && code != LE))
              || (GET_CODE (op0) == ASHIFTRT
                  && (code != GTU && code != LTU
                      && code != GEU && code != LEU)))
          && CONST_INT_P (XEXP (op0, 1))
          && INTVAL (XEXP (op0, 1)) >= 0
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
          && XEXP (op0, 1) == XEXP (op1, 1))
        {
          enum machine_mode mode = GET_MODE (op0);
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
          int shift_count = INTVAL (XEXP (op0, 1));

          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
            mask &= (mask >> shift_count) << shift_count;
          else if (GET_CODE (op0) == ASHIFT)
            mask = (mask & (mask << shift_count)) >> shift_count;

          if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
              && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
          else
            break;
        }
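
      /* For example, (eq (ashift A 2) (ashift B 2)) in SImode reduces
         to (eq A B) when the top two bits of both A and B are known to
         be zero, since then no information is shifted out.  */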

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
         SUBREGs are of the same mode, and, in both cases, the AND would
         be redundant if the comparison was done in the narrower mode,
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
         and the operand's possibly nonzero bits are 0xffffff01; in that case
         if we only care about QImode, we don't need the AND).  This case
         occurs if the output mode of an scc insn is not SImode and
         STORE_FLAG_VALUE == 1 (e.g., the 386).

         Similarly, check for a case where the AND's are ZERO_EXTEND
         operations from some narrower mode even though a SUBREG is not
         present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
               && CONST_INT_P (XEXP (op0, 1))
               && CONST_INT_P (XEXP (op1, 1)))
        {
          rtx inner_op0 = XEXP (op0, 0);
          rtx inner_op1 = XEXP (op1, 0);
          HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
          int changed = 0;

          if (paradoxical_subreg_p (inner_op0)
              && GET_CODE (inner_op1) == SUBREG
              && (GET_MODE (SUBREG_REG (inner_op0))
                  == GET_MODE (SUBREG_REG (inner_op1)))
              && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (inner_op0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
                                             GET_MODE (SUBREG_REG (inner_op0)))))
              && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
                                             GET_MODE (SUBREG_REG (inner_op1))))))
            {
              op0 = SUBREG_REG (inner_op0);
              op1 = SUBREG_REG (inner_op1);

              /* The resulting comparison is always unsigned since we masked
                 off the original sign bit.  */
              code = unsigned_condition (code);

              changed = 1;
            }

          else if (c0 == c1)
            for (tmode = GET_CLASS_NARROWEST_MODE
                 (GET_MODE_CLASS (GET_MODE (op0)));
                 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
              if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
                {
                  op0 = gen_lowpart (tmode, inner_op0);
                  op1 = gen_lowpart (tmode, inner_op1);
                  code = unsigned_condition (code);
                  changed = 1;
                  break;
                }

          if (! changed)
            break;
        }

      /* If both operands are NOT, we can strip off the outer operation
         and adjust the comparison code for swapped operands; similarly for
         NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
               || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
                   && (code == EQ || code == NE)))
        op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
        break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }
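
  /* For example, (gtu (const_int 4) X) is rewritten as (ltu X 4) so
     that the constant ends up as the second operand.  */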

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we are only concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */

  while (CONST_INT_P (op1))
    {
      enum machine_mode mode = GET_MODE (op0);
      unsigned int mode_width = GET_MODE_PRECISION (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
         CCmode, and the floating-point modes.  An exception is that we
         can handle VOIDmode if OP0 is a COMPARE or a comparison
         operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
          && ! (mode == VOIDmode
                && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
        break;

      /* Try to simplify the compare to constant, possibly changing the
         comparison op, and/or changing op1 to zero.  */
      code = simplify_compare_const (code, op0, &op1);
      const_op = INTVAL (op1);

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
                               || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
         MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p && HWI_COMPUTABLE_MODE_P (mode))
        op0 = force_to_mode (op0, mode,
                             (unsigned HOST_WIDE_INT) 1
                             << (GET_MODE_PRECISION (mode) - 1),
                             0);

      /* Now try cases based on the opcode of OP0.  If none of the cases
         does a "continue", we exit this loop immediately after the
         switch.  */

      switch (GET_CODE (op0))
        {
        case ZERO_EXTRACT:
          /* If we are extracting a single bit from a variable position in
             a constant that has only a single bit set and are comparing it
             with zero, we can convert this into an equality comparison
             between the position and the location of the single bit.  */
          /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
             have already reduced the shift count modulo the word size.  */
          if (!SHIFT_COUNT_TRUNCATED
              && CONST_INT_P (XEXP (op0, 0))
              && XEXP (op0, 1) == const1_rtx
              && equality_comparison_p && const_op == 0
              && (i = exact_log2 (UINTVAL (XEXP (op0, 0)))) >= 0)
            {
              if (BITS_BIG_ENDIAN)
                {
                  enum machine_mode new_mode
                    = mode_for_extraction (EP_extzv, 1);
                  if (new_mode == MAX_MACHINE_MODE)
                    i = BITS_PER_WORD - 1 - i;
                  else
                    {
                      mode = new_mode;
                      i = (GET_MODE_PRECISION (mode) - 1 - i);
                    }
                }

              op0 = XEXP (op0, 2);
              op1 = GEN_INT (i);
              const_op = i;

              /* Result is nonzero iff shift count is equal to I.  */
              code = reverse_condition (code);
              continue;
            }
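
          /* For example, with !BITS_BIG_ENDIAN,
             (eq (zero_extract (const_int 4) 1 POS) 0) becomes
             (ne POS 2): the extracted bit is set exactly when POS
             selects bit 2.  */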

          /* ... fall through ...  */

        case SIGN_EXTRACT:
          tem = expand_compound_operation (op0);
          if (tem != op0)
            {
              op0 = tem;
              continue;
            }
          break;

        case NOT:
          /* If testing for equality, we can take the NOT of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If just looking at the sign bit, reverse the sense of the
             comparison.  */
          if (sign_bit_comparison_p)
            {
              op0 = XEXP (op0, 0);
              code = (code == GE ? LT : GE);
              continue;
            }
          break;
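
          /* For example, (eq (not X) 0xff) in SImode becomes
             (eq X 0xffffff00), and the sign-bit test (lt (not X) 0)
             becomes (ge X 0).  */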

        case NEG:
          /* If testing for equality, we can take the NEG of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* The remaining cases only apply to comparisons with zero.  */
          if (const_op != 0)
            break;

          /* When X is ABS or is known positive,
             (neg X) is < 0 if and only if X != 0.  */

          if (sign_bit_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ABS
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
                      && (nonzero_bits (XEXP (op0, 0), mode)
                          & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
                         == 0)))
            {
              op0 = XEXP (op0, 0);
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If we have NEG of something whose two high-order bits are the
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
          if (num_sign_bit_copies (op0, mode) >= 2)
            {
              op0 = XEXP (op0, 0);
              code = swap_condition (code);
              continue;
            }
          break;

        case ROTATE:
          /* If we are testing equality and our count is a constant, we
             can perform the inverse operation on our RHS.  */
          if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
              && (tem = simplify_binary_operation (ROTATERT, mode,
                                                   op1, XEXP (op0, 1))) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
             a particular bit.  Convert it to an AND of a constant of that
             bit.  This will be converted into a ZERO_EXTRACT.  */
          if (const_op == 0 && sign_bit_comparison_p
              && CONST_INT_P (XEXP (op0, 1))
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((unsigned HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }
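
          /* For example, (lt (rotate X 1) 0) in SImode tests bit 30
             of X, so it becomes (ne (and X 0x40000000) 0).  */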

          /* Fall through.  */

        case ABS:
          /* ABS is ignorable inside an equality comparison with zero.  */
          if (const_op == 0 && equality_comparison_p)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SIGN_EXTEND:
          /* Can simplify (compare (zero/sign_extend FOO) CONST) to
             (compare FOO CONST) if CONST fits in FOO's mode and we
             are either testing inequality or have an unsigned
             comparison with ZERO_EXTEND or a signed comparison with
             SIGN_EXTEND.  But don't do it if we don't have a compare
             insn of the given mode, since we'd have to revert it
             later on, and then we wouldn't know whether to sign- or
             zero-extend.  */
          mode = GET_MODE (XEXP (op0, 0));
          if (GET_MODE_CLASS (mode) == MODE_INT
              && ! unsigned_comparison_p
              && HWI_COMPUTABLE_MODE_P (mode)
              && trunc_int_for_mode (const_op, mode) == const_op
              && have_insn_for (COMPARE, mode))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SUBREG:
          /* Check for the case where we are comparing A - C1 with C2, that is

               (subreg:MODE (plus (A) (-C1))) op (C2)

             with C1 a constant, and try to lift the SUBREG, i.e. to do the
             comparison in the wider mode.  One of the following two conditions
             must be true in order for this to be valid:

               1. The mode extension results in the same bit pattern being added
                  on both sides and the comparison is equality or unsigned.  As
                  C2 has been truncated to fit in MODE, the pattern can only be
                  all 0s or all 1s.

               2. The mode extension results in the sign bit being copied on
                  each side.

             The difficulty here is that we have predicates for A but not for
             (A - C1) so we need to check that C1 is within proper bounds so
             as to perturb A as little as possible.  */

          if (mode_width <= HOST_BITS_PER_WIDE_INT
              && subreg_lowpart_p (op0)
              && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) > mode_width
              && GET_CODE (SUBREG_REG (op0)) == PLUS
              && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
            {
              enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
              rtx a = XEXP (SUBREG_REG (op0), 0);
              HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));

              if ((c1 > 0
                   && (unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
                   && (equality_comparison_p || unsigned_comparison_p)
                   /* (A - C1) zero-extends if it is positive and sign-extends
                      if it is negative, C2 both zero- and sign-extends.  */
                   && ((0 == (nonzero_bits (a, inner_mode)
                              & ~GET_MODE_MASK (mode))
                        && const_op >= 0)
                       /* (A - C1) sign-extends if it is positive and 1-extends
                          if it is negative, C2 both sign- and 1-extends.  */
                       || (num_sign_bit_copies (a, inner_mode)
                           > (unsigned int) (GET_MODE_PRECISION (inner_mode)
                                             - mode_width)
                           && const_op < 0)))
                  || ((unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
                      /* (A - C1) always sign-extends, like C2.  */
                      && num_sign_bit_copies (a, inner_mode)
                         > (unsigned int) (GET_MODE_PRECISION (inner_mode)
                                           - (mode_width - 1))))
                {
                  op0 = SUBREG_REG (op0);
                  continue;
                }
            }

          /* If the inner mode is narrower and we are extracting the low part,
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
          if (subreg_lowpart_p (op0)
              && GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0))) < mode_width)
            /* Fall through */ ;
          else
            break;

          /* ... fall through ...  */

        case ZERO_EXTEND:
          mode = GET_MODE (XEXP (op0, 0));
          if (GET_MODE_CLASS (mode) == MODE_INT
              && (unsigned_comparison_p || equality_comparison_p)
              && HWI_COMPUTABLE_MODE_P (mode)
              && (unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (mode)
              && const_op >= 0
              && have_insn_for (COMPARE, mode))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case PLUS:
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        op1, XEXP (op0, 1))))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
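
          /* For example, (eq (plus X 5) 7) becomes (eq X 2); adding a
             constant is a bijection on the mode's values, so this is
             safe for EQ and NE even with overflow.  */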

          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
            {
              op0 = XEXP (XEXP (op0, 0), 0);
              code = (code == LT ? EQ : NE);
              continue;
            }
          break;

        case MINUS:
          /* We used to optimize signed comparisons against zero, but that
             was incorrect.  Unsigned comparisons against zero (GTU, LEU)
             arrive here as equality comparisons, or (GEU, LTU) are
             optimized away.  No need to special-case them.  */

          /* (eq (minus A B) C) -> (eq A (plus B C)) or
             (eq B (minus A C)), whichever simplifies.  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        XEXP (op0, 0), op1)))
            {
              op0 = XEXP (op0, 1);
              op1 = tem;
              continue;
            }

          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
             of bits in X minus 1, is one iff X > 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
              && UINTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? LE : GT);
              continue;
            }
          break;

        case XOR:
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
             if C is zero or B is a constant.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (XOR, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
          break;

        case EQ:  case NE:
        case UNEQ:  case LTGT:
        case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
        case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
        case UNORDERED: case ORDERED:
          /* We can't do anything if OP0 is a condition code value, rather
             than an actual data value.  */
          if (const_op != 0
              || CC0_P (XEXP (op0, 0))
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
            break;

          /* Get the two operands being compared.  */
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
          else
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

          /* Check for the cases where we simply want the result of the
             earlier test or the opposite of that result.  */
          if (code == NE || code == EQ
              || (val_signbit_known_set_p (GET_MODE (op0), STORE_FLAG_VALUE)
                  && (code == LT || code == GE)))
            {
              enum rtx_code new_code;
              if (code == LT || code == NE)
                new_code = GET_CODE (op0);
              else
                new_code = reversed_comparison_code (op0, NULL);

              if (new_code != UNKNOWN)
                {
                  code = new_code;
                  op0 = tem;
                  op1 = tem1;
                  continue;
                }
            }
          break;
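
          /* For example, (ne (lt A B) 0) with STORE_FLAG_VALUE == 1 is
             just (lt A B), while (eq (lt A B) 0) becomes the reversed
             test (ge A B).  */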

        case IOR:
          /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
             iff X <= 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? GT : LE);
              continue;
            }
          break;

        case AND:
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
             will be converted to a ZERO_EXTRACT later.  */
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
            {
              op0 = gen_rtx_LSHIFTRT (mode, XEXP (op0, 1),
                                      XEXP (XEXP (op0, 0), 1));
              op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
              continue;
            }

          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
             zero and X is a comparison and C1 and C2 describe only bits set
             in STORE_FLAG_VALUE, we can compare with X.  */
          if (const_op == 0 && equality_comparison_p
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && CONST_INT_P (XEXP (op0, 1))
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
            {
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
              if ((~STORE_FLAG_VALUE & mask) == 0
                  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
                          && COMPARISON_P (tem))))
                {
                  op0 = XEXP (XEXP (op0, 0), 0);
                  continue;
                }
            }

          /* If we are doing an equality comparison of an AND of a bit equal
             to the sign bit, replace this with a LT or GE comparison of
             the underlying value.  */
          if (equality_comparison_p
              && const_op == 0
              && CONST_INT_P (XEXP (op0, 1))
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              op0 = XEXP (op0, 0);
              code = (code == EQ ? GE : LT);
              continue;
            }
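
          /* For example, (eq (and X 0x80000000) 0) in SImode tests that
             the sign bit of X is clear, so it becomes (ge X 0); the NE
             form likewise becomes (lt X 0).  */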

          /* If this AND operation is really a ZERO_EXTEND from a narrower
             mode, the constant fits within that mode, and this is either an
             equality or unsigned comparison, try to do this comparison in
             the narrower mode.

             Note that in:

             (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
             -> (ne:DI (reg:SI 4) (const_int 0))

             unless TRULY_NOOP_TRUNCATION allows it or the register is
             known to hold a value of the required mode, the
             transformation is invalid.  */
          if ((equality_comparison_p || unsigned_comparison_p)
              && CONST_INT_P (XEXP (op0, 1))
              && (i = exact_log2 ((UINTVAL (XEXP (op0, 1))
                                   & GET_MODE_MASK (mode))
                                  + 1)) >= 0
              && const_op >> i == 0
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
              && (TRULY_NOOP_TRUNCATION_MODES_P (tmode, GET_MODE (op0))
                  || (REG_P (XEXP (op0, 0))
                      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
            {
              op0 = gen_lowpart (tmode, XEXP (op0, 0));
              continue;
            }
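
          /* For example, (ltu (and:SI X 0xff) 10) can be done as a
             QImode comparison of the low byte of X when truncating
             SImode to QImode is a no-op, since the AND is just a QImode
             zero-extension.  */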

          /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
             fits in both M1 and M2 and the SUBREG is either paradoxical
             or represents the low part, permute the SUBREG and the AND
             and try again.  */
          if (GET_CODE (XEXP (op0, 0)) == SUBREG)
            {
              unsigned HOST_WIDE_INT c1;
              tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
              /* Require an integral mode, to avoid creating something like
                 (AND:SF ...).  */
              if (SCALAR_INT_MODE_P (tmode)
                  /* It is unsafe to commute the AND into the SUBREG if the
                     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
                     not defined.  As originally written the upper bits
                     have a defined value due to the AND operation.
                     However, if we commute the AND inside the SUBREG then
                     they no longer have defined values and the meaning of
                     the code has been changed.  */
                  && (0
#ifdef WORD_REGISTER_OPERATIONS
                      || (mode_width > GET_MODE_PRECISION (tmode)
                          && mode_width <= BITS_PER_WORD)
#endif
                      || (mode_width <= GET_MODE_PRECISION (tmode)
                          && subreg_lowpart_p (XEXP (op0, 0))))
                  && CONST_INT_P (XEXP (op0, 1))
                  && mode_width <= HOST_BITS_PER_WIDE_INT
                  && HWI_COMPUTABLE_MODE_P (tmode)
                  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
                  && (c1 & ~GET_MODE_MASK (tmode)) == 0
                  && c1 != mask
                  && c1 != GET_MODE_MASK (tmode))
                {
                  op0 = simplify_gen_binary (AND, tmode,
                                             SUBREG_REG (XEXP (op0, 0)),
                                             gen_int_mode (c1, tmode));
                  op0 = gen_lowpart (mode, op0);
                  continue;
                }
            }

          /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
          if (const_op == 0 && equality_comparison_p
              && XEXP (op0, 1) == const1_rtx
              && GET_CODE (XEXP (op0, 0)) == NOT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode,
                                            XEXP (XEXP (op0, 0), 0), 1);
              code = (code == NE ? EQ : NE);
              continue;
            }

          /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
             (eq (and (lshiftrt X) 1) 0).
             Also handle the case where (not X) is expressed using xor.  */
          if (const_op == 0 && equality_comparison_p
              && XEXP (op0, 1) == const1_rtx
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
            {
              rtx shift_op = XEXP (XEXP (op0, 0), 0);
              rtx shift_count = XEXP (XEXP (op0, 0), 1);

              if (GET_CODE (shift_op) == NOT
                  || (GET_CODE (shift_op) == XOR
                      && CONST_INT_P (XEXP (shift_op, 1))
                      && CONST_INT_P (shift_count)
                      && HWI_COMPUTABLE_MODE_P (mode)
                      && (UINTVAL (XEXP (shift_op, 1))
                          == (unsigned HOST_WIDE_INT) 1
                               << INTVAL (shift_count))))
                {
                  op0
                    = gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count);
                  op0 = simplify_and_const_int (NULL_RTX, mode, op0, 1);
                  code = (code == NE ? EQ : NE);
                  continue;
                }
            }
          break;
11829
 
11830
        case ASHIFT:
11831
          /* If we have (compare (ashift FOO N) (const_int C)) and
11832
             the high order N bits of FOO (N+1 if an inequality comparison)
11833
             are known to be zero, we can do this by comparing FOO with C
11834
             shifted right N bits so long as the low-order N bits of C are
11835
             zero.  */
11836
          if (CONST_INT_P (XEXP (op0, 1))
11837
              && INTVAL (XEXP (op0, 1)) >= 0
11838
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11839
                  < HOST_BITS_PER_WIDE_INT)
11840
              && (((unsigned HOST_WIDE_INT) const_op
11841
                   & (((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1)))
11842
                      - 1)) == 0)
11843
              && mode_width <= HOST_BITS_PER_WIDE_INT
11844
              && (nonzero_bits (XEXP (op0, 0), mode)
11845
                  & ~(mask >> (INTVAL (XEXP (op0, 1))
11846
                               + ! equality_comparison_p))) == 0)
11847
            {
11848
              /* We must perform a logical shift, not an arithmetic one,
11849
                 as we want the top N bits of C to be zero.  */
11850
              unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11851
 
11852
              temp >>= INTVAL (XEXP (op0, 1));
11853
              op1 = gen_int_mode (temp, mode);
11854
              op0 = XEXP (op0, 0);
11855
              continue;
11856
            }
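/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the transformation above in an 8-bit "mode":
   with FOO's high N bits zero (no wrap-around from the shift) and C's
   low N bits zero, comparing FOO << N against C is the same as
   comparing FOO against C >> N.  The unsigned inequality shown needs
   only N known-zero bits; the source asks for N+1 for signed
   inequalities so the sign bit stays clear.  Illustrative only;
   guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const unsigned int n = 3;     /* shift count */
  unsigned int foo, c;

  for (foo = 0; foo < (1u << (8 - n)); foo++)
    for (c = 0; c < (1u << 8); c += 1u << n)
      {
        uint8_t lhs = (uint8_t) (foo << n);

        assert ((lhs == c) == (foo == c >> n));
        assert ((lhs <  c) == (foo <  c >> n));  /* unsigned compare */
      }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */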
11857
 
11858
          /* If we are doing a sign bit comparison, it means we are testing
11859
             a particular bit.  Convert it to the appropriate AND.  */
11860
          if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
11861
              && mode_width <= HOST_BITS_PER_WIDE_INT)
11862
            {
11863
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11864
                                            ((unsigned HOST_WIDE_INT) 1
11865
                                             << (mode_width - 1
11866
                                                 - INTVAL (XEXP (op0, 1)))));
11867
              code = (code == LT ? NE : EQ);
11868
              continue;
11869
            }
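/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the conversion above in an 8-bit "mode":
   (lt (ashift X N) 0) tests the sign bit of X << N, which is bit
   (width - 1 - N) of X, hence the AND.  The int8_t cast assumes the
   usual two's complement wrap-around.  Illustrative only; guarded by
   #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned int x, n;

  for (x = 0; x < 256; x++)
    for (n = 0; n < 8; n++)
      {
        int8_t shifted = (int8_t) (x << n);

        assert ((shifted < 0) == ((x & (1u << (8 - 1 - n))) != 0));
      }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */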
11870
 
11871
          /* If this is an equality comparison with zero and we are shifting
          /* If this is an equality comparison with zero and we are shifting
11872
             the low bit to the sign bit, we can convert this to an AND of the
11873
             low-order bit.  */
11874
          if (const_op == 0 && equality_comparison_p
11875
              && CONST_INT_P (XEXP (op0, 1))
11876
              && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
11877
            {
11878
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), 1);
11879
              continue;
11880
            }
11881
          break;
11882
 
11883
        case ASHIFTRT:
11884
          /* If this is an equality comparison with zero, we can do this
11885
             as a logical shift, which might be much simpler.  */
11886
          if (equality_comparison_p && const_op == 0
11887
              && CONST_INT_P (XEXP (op0, 1)))
11888
            {
11889
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11890
                                          XEXP (op0, 0),
11891
                                          INTVAL (XEXP (op0, 1)));
11892
              continue;
11893
            }
11894
 
11895
          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11896
             do the comparison in a narrower mode.  */
11897
          if (! unsigned_comparison_p
11898
              && CONST_INT_P (XEXP (op0, 1))
11899
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
11900
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11901
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11902
                                         MODE_INT, 1)) != BLKmode
11903
              && (((unsigned HOST_WIDE_INT) const_op
11904
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
11905
                  <= GET_MODE_MASK (tmode)))
11906
            {
11907
              op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
11908
              continue;
11909
            }
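/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of narrowing the comparison of a sign extension:
   (ashiftrt (ashift X 24) 24) sign-extends the low byte, and when the
   constant fits the narrow mode, comparing the narrow value directly
   gives the same answer.  Assumes two's complement and arithmetic
   right shifts of signed values, as on targets GCC supports.
   Illustrative only; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int v, c;

  for (v = -128; v <= 127; v++)
    for (c = -128; c <= 127; c++)
      {
        int32_t extended = (int32_t) ((uint32_t) v << 24) >> 24;

        assert ((extended <  c) == ((int8_t) v <  c));
        assert ((extended == c) == ((int8_t) v == c));
      }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */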
11910
 
11911
          /* Likewise if OP0 is a PLUS of a sign extension with a
11912
             constant, which is usually represented with the PLUS
11913
             between the shifts.  */
11914
          if (! unsigned_comparison_p
11915
              && CONST_INT_P (XEXP (op0, 1))
11916
              && GET_CODE (XEXP (op0, 0)) == PLUS
11917
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11918
              && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11919
              && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11920
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11921
                                         MODE_INT, 1)) != BLKmode
11922
              && (((unsigned HOST_WIDE_INT) const_op
11923
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
11924
                  <= GET_MODE_MASK (tmode)))
11925
            {
11926
              rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11927
              rtx add_const = XEXP (XEXP (op0, 0), 1);
11928
              rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
11929
                                                   add_const, XEXP (op0, 1));
11930
 
11931
              op0 = simplify_gen_binary (PLUS, tmode,
11932
                                         gen_lowpart (tmode, inner),
11933
                                         new_const);
11934
              continue;
11935
            }
11936
 
11937
          /* ... fall through ...  */
11938
        case LSHIFTRT:
11939
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11940
             the low order N bits of FOO are known to be zero, we can do this
11941
             by comparing FOO with C shifted left N bits so long as no
11942
             overflow occurs.  Even if the low order N bits of FOO aren't known
11943
             to be zero, if the comparison is >= or < we can use the same
11944
             optimization, and for > or <= we can do so by setting all the low
11945
             order N bits in the comparison constant.  */
11946
          if (CONST_INT_P (XEXP (op0, 1))
11947
              && INTVAL (XEXP (op0, 1)) > 0
11948
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11949
              && mode_width <= HOST_BITS_PER_WIDE_INT
11950
              && (((unsigned HOST_WIDE_INT) const_op
11951
                   + (GET_CODE (op0) != LSHIFTRT
11952
                      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11953
                         + 1)
11954
                      : 0))
11955
                  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11956
            {
11957
              unsigned HOST_WIDE_INT low_bits
11958
                = (nonzero_bits (XEXP (op0, 0), mode)
11959
                   & (((unsigned HOST_WIDE_INT) 1
11960
                       << INTVAL (XEXP (op0, 1))) - 1));
11961
              if (low_bits == 0 || !equality_comparison_p)
11962
                {
11963
                  /* If the shift was logical, then we must make the condition
11964
                     unsigned.  */
11965
                  if (GET_CODE (op0) == LSHIFTRT)
11966
                    code = unsigned_condition (code);
11967
 
11968
                  const_op <<= INTVAL (XEXP (op0, 1));
11969
                  if (low_bits != 0
11970
                      && (code == GT || code == GTU
11971
                          || code == LE || code == LEU))
11972
                    const_op
11973
                      |= (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1);
11974
                  op1 = GEN_INT (const_op);
11975
                  op0 = XEXP (op0, 0);
11976
                  continue;
11977
                }
11978
            }
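/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the xshiftrt case above in an 8-bit "mode":
   >= and < work against C << N directly, > and <= work once the
   discarded low N bits are set in the constant, and equality is exact
   when X's low N bits are known zero.  Illustrative only; guarded by
   #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  const unsigned int n = 3;
  unsigned int x, c;

  for (x = 0; x < 256; x++)
    for (c = 0; c < (1u << (8 - n)); c++)
      {
        unsigned int low = (1u << n) - 1;   /* the discarded low bits */

        /* >= needs no adjustment of the constant...  */
        assert (((x >> n) >= c) == (x >= c << n));
        /* ...> absorbs the discarded low bits...  */
        assert (((x >> n) > c) == (x > ((c << n) | low)));
        /* ...and equality is exact when the low bits are zero.  */
        if ((x & low) == 0)
          assert (((x >> n) == c) == (x == c << n));
      }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */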
11979
 
11980
          /* If we are using this shift to extract just the sign bit, we
11981
             can replace this with an LT or GE comparison.  */
11982
          if (const_op == 0
11983
              && (equality_comparison_p || sign_bit_comparison_p)
11984
              && CONST_INT_P (XEXP (op0, 1))
11985
              && UINTVAL (XEXP (op0, 1)) == mode_width - 1)
11986
            {
11987
              op0 = XEXP (op0, 0);
11988
              code = (code == NE || code == GT ? LT : GE);
11989
              continue;
11990
            }
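/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the sign-bit extraction above: shifting right
   by width-1 leaves -1 or 0 (arithmetic shift) or 1 or 0 (logical
   shift), so (ne ... 0) is just (lt X 0).  Assumes arithmetic right
   shifts of negative values.  Illustrative only; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int v;

  for (v = -128; v <= 127; v++)
    {
      int8_t x = (int8_t) v;

      assert (((x >> 7) != 0) == (x < 0));               /* arithmetic */
      assert ((((uint8_t) x >> 7) != 0) == (x < 0));     /* logical */
    }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */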
11991
          break;
11992
 
11993
        default:
11994
          break;
11995
        }
11996
 
11997
      break;
11998
    }
11999
 
12000
  /* Now make any compound operations involved in this comparison.  Then,
12001
     check for an outermost SUBREG on OP0 that is not doing anything or is
12002
     paradoxical.  The latter transformation must only be performed when
12003
     it is known that the "extra" bits will be the same in op0 and op1 or
12004
     that they don't matter.  There are three cases to consider:
12005
 
12006
     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
12007
     care bits and we can assume they have any convenient value.  So
12008
     making the transformation is safe.
12009
 
12010
     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
12011
     In this case the upper bits of op0 are undefined.  We should not make
12012
     the simplification in that case as we do not know the contents of
12013
     those bits.
12014
 
12015
     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
12016
     UNKNOWN.  In that case we know those bits are zeros or ones.  We must
12017
     also be sure that they are the same as the upper bits of op1.
12018
 
12019
     We can never remove a SUBREG for a non-equality comparison because
12020
     the sign bit is in a different place in the underlying object.  */
12021
 
12022
  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
12023
  op1 = make_compound_operation (op1, SET);
12024
 
12025
  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
12026
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
12027
      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
12028
      && (code == NE || code == EQ))
12029
    {
12030
      if (paradoxical_subreg_p (op0))
12031
        {
12032
          /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
12033
             implemented.  */
12034
          if (REG_P (SUBREG_REG (op0)))
12035
            {
12036
              op0 = SUBREG_REG (op0);
12037
              op1 = gen_lowpart (GET_MODE (op0), op1);
12038
            }
12039
        }
12040
      else if ((GET_MODE_PRECISION (GET_MODE (SUBREG_REG (op0)))
12041
                <= HOST_BITS_PER_WIDE_INT)
12042
               && (nonzero_bits (SUBREG_REG (op0),
12043
                                 GET_MODE (SUBREG_REG (op0)))
12044
                   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
12045
        {
12046
          tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
12047
 
12048
          if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
12049
               & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
12050
            op0 = SUBREG_REG (op0), op1 = tem;
12051
        }
12052
    }
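/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the non-paradoxical branch above: when the
   bits of SUBREG_REG (op0) outside the narrow mode's mask are known
   zero, and the constant fits as well, the lowpart SUBREG can be
   dropped and the equality tested in the wider inner mode.  All
   values here are illustrative only; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  const uint32_t op1 = 12345;   /* fits in the narrow mode */
  uint32_t inner;

  for (inner = 0; inner < (1u << 16); inner += 13)
    {
      /* INNER plays SUBREG_REG (op0): its high 16 bits are zero.  */
      uint16_t narrow = (uint16_t) inner;

      assert ((narrow == (uint16_t) op1) == (inner == op1));
    }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */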
12053
 
12054
  /* We now do the opposite procedure: Some machines don't have compare
12055
     insns in all modes.  If OP0's mode is an integer mode smaller than a
12056
     word and we can't do a compare in that mode, see if there is a larger
12057
     mode for which we can do the compare.  There are a number of cases in
12058
     which we can use the wider mode.  */
12059
 
12060
  mode = GET_MODE (op0);
12061
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
12062
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
12063
      && ! have_insn_for (COMPARE, mode))
12064
    for (tmode = GET_MODE_WIDER_MODE (mode);
12065
         (tmode != VOIDmode && HWI_COMPUTABLE_MODE_P (tmode));
12066
         tmode = GET_MODE_WIDER_MODE (tmode))
12067
      if (have_insn_for (COMPARE, tmode))
12068
        {
12069
          int zero_extended;
12070
 
12071
          /* If this is a test for negative, we can make an explicit
12072
             test of the sign bit.  Test this first so we can use
12073
             a paradoxical subreg to extend OP0.  */
12074
 
12075
          if (op1 == const0_rtx && (code == LT || code == GE)
12076
              && HWI_COMPUTABLE_MODE_P (mode))
12077
            {
12078
              op0 = simplify_gen_binary (AND, tmode,
12079
                                         gen_lowpart (tmode, op0),
12080
                                         GEN_INT ((unsigned HOST_WIDE_INT) 1
12081
                                                  << (GET_MODE_BITSIZE (mode)
12082
                                                      - 1)));
12083
              code = (code == LT) ? NE : EQ;
12084
              break;
12085
            }
12086
 
12087
          /* If the only nonzero bits in OP0 and OP1 are those in the
12088
             narrower mode and this is an equality or unsigned comparison,
12089
             we can use the wider mode.  Similarly for sign-extended
12090
             values, in which case it is true for all comparisons.  */
12091
          zero_extended = ((code == EQ || code == NE
12092
                            || code == GEU || code == GTU
12093
                            || code == LEU || code == LTU)
12094
                           && (nonzero_bits (op0, tmode)
12095
                               & ~GET_MODE_MASK (mode)) == 0
12096
                           && ((CONST_INT_P (op1)
12097
                                || (nonzero_bits (op1, tmode)
12098
                                    & ~GET_MODE_MASK (mode)) == 0)));
12099
 
12100
          if (zero_extended
12101
              || ((num_sign_bit_copies (op0, tmode)
12102
                   > (unsigned int) (GET_MODE_PRECISION (tmode)
12103
                                     - GET_MODE_PRECISION (mode)))
12104
                  && (num_sign_bit_copies (op1, tmode)
12105
                      > (unsigned int) (GET_MODE_PRECISION (tmode)
12106
                                        - GET_MODE_PRECISION (mode)))))
12107
            {
12108
              /* If OP0 is an AND and we don't have an AND in MODE either,
12109
                 make a new AND in the proper mode.  */
12110
              if (GET_CODE (op0) == AND
12111
                  && !have_insn_for (AND, mode))
12112
                op0 = simplify_gen_binary (AND, tmode,
12113
                                           gen_lowpart (tmode,
12114
                                                        XEXP (op0, 0)),
12115
                                           gen_lowpart (tmode,
12116
                                                        XEXP (op0, 1)));
12117
              else
12118
                {
12119
                  if (zero_extended)
12120
                    {
12121
                      op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
12122
                      op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
12123
                    }
12124
                  else
12125
                    {
12126
                      op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
12127
                      op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
12128
                    }
12129
                  break;
12130
                }
12131
            }
12132
        }
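/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the widening rules above: sign-extended
   operands compare the same way in the wider mode for every
   condition, while zero-extended operands do so for equality and the
   unsigned conditions.  Illustrative only; guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int a, b;

  for (a = -128; a <= 127; a++)
    for (b = -128; b <= 127; b++)
      {
        int8_t x = (int8_t) a, y = (int8_t) b;
        uint8_t ux = (uint8_t) x, uy = (uint8_t) y;

        /* Sign extension preserves signed order and equality...  */
        assert ((x <  y) == ((int32_t) x <  (int32_t) y));
        assert ((x == y) == ((int32_t) x == (int32_t) y));
        /* ...zero extension preserves equality and unsigned order.  */
        assert ((ux == uy) == ((uint32_t) ux == (uint32_t) uy));
        assert ((ux <  uy) == ((uint32_t) ux <  (uint32_t) uy));
      }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */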
12133
 
12134
#ifdef CANONICALIZE_COMPARISON
12135
  /* If this machine only supports a subset of valid comparisons, see if we
12136
     can convert an unsupported one into a supported one.  */
12137
  CANONICALIZE_COMPARISON (code, op0, op1);
12138
#endif
12139
 
12140
  *pop0 = op0;
12141
  *pop1 = op1;
12142
 
12143
  return code;
12144
}
12145
 
12146
/* Utility function for record_value_for_reg.  Count number of
12147
   rtxs in X.  */
12148
static int
12149
count_rtxs (rtx x)
12150
{
12151
  enum rtx_code code = GET_CODE (x);
12152
  const char *fmt;
12153
  int i, j, ret = 1;
12154
 
12155
  if (GET_RTX_CLASS (code) == '2'
12156
      || GET_RTX_CLASS (code) == 'c')
12157
    {
12158
      rtx x0 = XEXP (x, 0);
12159
      rtx x1 = XEXP (x, 1);
12160
 
12161
      if (x0 == x1)
12162
        return 1 + 2 * count_rtxs (x0);
12163
 
12164
      if ((GET_RTX_CLASS (GET_CODE (x1)) == '2'
12165
           || GET_RTX_CLASS (GET_CODE (x1)) == 'c')
12166
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
12167
        return 2 + 2 * count_rtxs (x0)
12168
               + count_rtxs (x == XEXP (x1, 0)
12169
                             ? XEXP (x1, 1) : XEXP (x1, 0));
12170
 
12171
      if ((GET_RTX_CLASS (GET_CODE (x0)) == '2'
12172
           || GET_RTX_CLASS (GET_CODE (x0)) == 'c')
12173
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
12174
        return 2 + 2 * count_rtxs (x1)
12175
               + count_rtxs (x == XEXP (x0, 0)
12176
                             ? XEXP (x0, 1) : XEXP (x0, 0));
12177
    }
12178
 
12179
  fmt = GET_RTX_FORMAT (code);
12180
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12181
    if (fmt[i] == 'e')
12182
      ret += count_rtxs (XEXP (x, i));
12183
    else if (fmt[i] == 'E')
12184
      for (j = 0; j < XVECLEN (x, i); j++)
12185
        ret += count_rtxs (XVECEXP (x, i, j));
12186
 
12187
  return ret;
12188
}
12189
 
12190
/* Utility function for the following routine.  Called when X is part of a value
12191
   being stored into last_set_value.  Sets last_set_table_tick
12192
   for each register mentioned.  Similar to mention_regs in cse.c  */
12193
 
12194
static void
12195
update_table_tick (rtx x)
12196
{
12197
  enum rtx_code code = GET_CODE (x);
12198
  const char *fmt = GET_RTX_FORMAT (code);
12199
  int i, j;
12200
 
12201
  if (code == REG)
12202
    {
12203
      unsigned int regno = REGNO (x);
12204
      unsigned int endregno = END_REGNO (x);
12205
      unsigned int r;
12206
 
12207
      for (r = regno; r < endregno; r++)
12208
        {
12209
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
12210
          rsp->last_set_table_tick = label_tick;
12211
        }
12212
 
12213
      return;
12214
    }
12215
 
12216
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12217
    if (fmt[i] == 'e')
12218
      {
12219
        /* Check for identical subexpressions.  If x contains
12220
           identical subexpressions we only have to traverse one of
12221
           them.  */
12222
        if (i == 0 && ARITHMETIC_P (x))
12223
          {
12224
            /* Note that at this point x1 has already been
12225
               processed.  */
12226
            rtx x0 = XEXP (x, 0);
12227
            rtx x1 = XEXP (x, 1);
12228
 
12229
            /* If x0 and x1 are identical then there is no need to
12230
               process x0.  */
12231
            if (x0 == x1)
12232
              break;
12233
 
12234
            /* If x0 is identical to a subexpression of x1 then while
12235
               processing x1, x0 has already been processed.  Thus we
12236
               are done with x.  */
12237
            if (ARITHMETIC_P (x1)
12238
                && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
12239
              break;
12240
 
12241
            /* If x1 is identical to a subexpression of x0 then we
12242
               still have to process the rest of x0.  */
12243
            if (ARITHMETIC_P (x0)
12244
                && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
12245
              {
12246
                update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
12247
                break;
12248
              }
12249
          }
12250
 
12251
        update_table_tick (XEXP (x, i));
12252
      }
12253
    else if (fmt[i] == 'E')
12254
      for (j = 0; j < XVECLEN (x, i); j++)
12255
        update_table_tick (XVECEXP (x, i, j));
12256
}
12257
 
12258
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
12259
   are saying that the register is clobbered and we no longer know its
12260
   value.  If INSN is zero, don't update reg_stat[].last_set; this is
12261
   only permitted with VALUE also zero and is used to invalidate the
12262
   register.  */
12263
 
12264
static void
12265
record_value_for_reg (rtx reg, rtx insn, rtx value)
12266
{
12267
  unsigned int regno = REGNO (reg);
12268
  unsigned int endregno = END_REGNO (reg);
12269
  unsigned int i;
12270
  reg_stat_type *rsp;
12271
 
12272
  /* If VALUE contains REG and we have a previous value for REG, substitute
12273
     the previous value.  */
12274
  if (value && insn && reg_overlap_mentioned_p (reg, value))
12275
    {
12276
      rtx tem;
12277
 
12278
      /* Set things up so get_last_value is allowed to see anything set up to
12279
         our insn.  */
12280
      subst_low_luid = DF_INSN_LUID (insn);
12281
      tem = get_last_value (reg);
12282
 
12283
      /* If TEM is simply a binary operation with two CLOBBERs as operands,
12284
         it isn't going to be useful and will take a lot of time to process,
12285
         so just use the CLOBBER.  */
12286
 
12287
      if (tem)
12288
        {
12289
          if (ARITHMETIC_P (tem)
12290
              && GET_CODE (XEXP (tem, 0)) == CLOBBER
12291
              && GET_CODE (XEXP (tem, 1)) == CLOBBER)
12292
            tem = XEXP (tem, 0);
12293
          else if (count_occurrences (value, reg, 1) >= 2)
12294
            {
12295
              /* If there are two or more occurrences of REG in VALUE,
12296
                 prevent the value from growing too much.  */
12297
              if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
12298
                tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
12299
            }
12300
 
12301
          value = replace_rtx (copy_rtx (value), reg, tem);
12302
        }
12303
    }
12304
 
12305
  /* For each register modified, show we don't know its value, that
12306
     we don't know about its bitwise content, that its value has been
12307
     updated, and that we don't know the location of the death of the
12308
     register.  */
12309
  for (i = regno; i < endregno; i++)
12310
    {
12311
      rsp = VEC_index (reg_stat_type, reg_stat, i);
12312
 
12313
      if (insn)
12314
        rsp->last_set = insn;
12315
 
12316
      rsp->last_set_value = 0;
12317
      rsp->last_set_mode = VOIDmode;
12318
      rsp->last_set_nonzero_bits = 0;
12319
      rsp->last_set_sign_bit_copies = 0;
12320
      rsp->last_death = 0;
12321
      rsp->truncated_to_mode = VOIDmode;
12322
    }
12323
 
12324
  /* Mark registers that are being referenced in this value.  */
12325
  if (value)
12326
    update_table_tick (value);
12327
 
12328
  /* Now update the status of each register being set.
12329
     If someone is using this register in this block, set this register
12330
     to invalid since we will get confused between the two lives in this
12331
     basic block.  This makes using this register always invalid.  In cse, we
12332
     scan the table to invalidate all entries using this register, but this
12333
     is too much work for us.  */
12334
 
12335
  for (i = regno; i < endregno; i++)
12336
    {
12337
      rsp = VEC_index (reg_stat_type, reg_stat, i);
12338
      rsp->last_set_label = label_tick;
12339
      if (!insn
12340
          || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
12341
        rsp->last_set_invalid = 1;
12342
      else
12343
        rsp->last_set_invalid = 0;
12344
    }
12345
 
12346
  /* The value being assigned might refer to X (like in "x++;").  In that
12347
     case, we must replace it with (clobber (const_int 0)) to prevent
12348
     infinite loops.  */
12349
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
12350
  if (value && !get_last_value_validate (&value, insn, label_tick, 0))
12351
    {
12352
      value = copy_rtx (value);
12353
      if (!get_last_value_validate (&value, insn, label_tick, 1))
12354
        value = 0;
12355
    }
12356
 
12357
  /* For the main register being modified, update the value, the mode, the
12358
     nonzero bits, and the number of sign bit copies.  */
12359
 
12360
  rsp->last_set_value = value;
12361
 
12362
  if (value)
12363
    {
12364
      enum machine_mode mode = GET_MODE (reg);
12365
      subst_low_luid = DF_INSN_LUID (insn);
12366
      rsp->last_set_mode = mode;
12367
      if (GET_MODE_CLASS (mode) == MODE_INT
12368
          && HWI_COMPUTABLE_MODE_P (mode))
12369
        mode = nonzero_bits_mode;
12370
      rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
12371
      rsp->last_set_sign_bit_copies
12372
        = num_sign_bit_copies (value, GET_MODE (reg));
12373
    }
12374
}
12375
 
12376
/* Called via note_stores from record_dead_and_set_regs to handle one
12377
   SET or CLOBBER in an insn.  DATA is the instruction in which the
12378
   set is occurring.  */
12379
 
12380
static void
12381
record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
12382
{
12383
  rtx record_dead_insn = (rtx) data;
12384
 
12385
  if (GET_CODE (dest) == SUBREG)
12386
    dest = SUBREG_REG (dest);
12387
 
12388
  if (!record_dead_insn)
12389
    {
12390
      if (REG_P (dest))
12391
        record_value_for_reg (dest, NULL_RTX, NULL_RTX);
12392
      return;
12393
    }
12394
 
12395
  if (REG_P (dest))
12396
    {
12397
      /* If we are setting the whole register, we know its value.  Otherwise
12398
         show that we don't know the value.  We can handle SUBREG in
12399
         some cases.  */
12400
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
12401
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
12402
      else if (GET_CODE (setter) == SET
12403
               && GET_CODE (SET_DEST (setter)) == SUBREG
12404
               && SUBREG_REG (SET_DEST (setter)) == dest
12405
               && GET_MODE_PRECISION (GET_MODE (dest)) <= BITS_PER_WORD
12406
               && subreg_lowpart_p (SET_DEST (setter)))
12407
        record_value_for_reg (dest, record_dead_insn,
12408
                              gen_lowpart (GET_MODE (dest),
12409
                                           SET_SRC (setter)));
12410
      else
12411
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
12412
    }
12413
  else if (MEM_P (dest)
12414
           /* Ignore pushes, they clobber nothing.  */
12415
           && ! push_operand (dest, GET_MODE (dest)))
12416
    mem_last_set = DF_INSN_LUID (record_dead_insn);
12417
}
12418
 
12419
/* Update the records of when each REG was most recently set or killed
12420
   for the things done by INSN.  This is the last thing done in processing
12421
   INSN in the combiner loop.
12422
 
12423
   We update reg_stat[], in particular fields last_set, last_set_value,
12424
   last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
12425
   last_death, and also the similar information mem_last_set (which insn
12426
   most recently modified memory) and last_call_luid (which insn was the
12427
   most recent subroutine call).  */
12428
 
12429
static void
12430
record_dead_and_set_regs (rtx insn)
12431
{
12432
  rtx link;
12433
  unsigned int i;
12434
 
12435
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
12436
    {
12437
      if (REG_NOTE_KIND (link) == REG_DEAD
12438
          && REG_P (XEXP (link, 0)))
12439
        {
12440
          unsigned int regno = REGNO (XEXP (link, 0));
12441
          unsigned int endregno = END_REGNO (XEXP (link, 0));
12442
 
12443
          for (i = regno; i < endregno; i++)
12444
            {
12445
              reg_stat_type *rsp;
12446
 
12447
              rsp = VEC_index (reg_stat_type, reg_stat, i);
12448
              rsp->last_death = insn;
12449
            }
12450
        }
12451
      else if (REG_NOTE_KIND (link) == REG_INC)
12452
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
12453
    }
12454
 
12455
  if (CALL_P (insn))
12456
    {
12457
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
12458
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
12459
          {
12460
            reg_stat_type *rsp;
12461
 
12462
            rsp = VEC_index (reg_stat_type, reg_stat, i);
12463
            rsp->last_set_invalid = 1;
12464
            rsp->last_set = insn;
12465
            rsp->last_set_value = 0;
12466
            rsp->last_set_mode = VOIDmode;
12467
            rsp->last_set_nonzero_bits = 0;
12468
            rsp->last_set_sign_bit_copies = 0;
12469
            rsp->last_death = 0;
12470
            rsp->truncated_to_mode = VOIDmode;
12471
          }
12472
 
12473
      last_call_luid = mem_last_set = DF_INSN_LUID (insn);
12474
 
12475
      /* We can't combine into a call pattern.  Remember, though, that
12476
         the return value register is set at this LUID.  We could
12477
         still replace a register with the return value from the
12478
         wrong subroutine call!  */
12479
      note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
12480
    }
12481
  else
12482
    note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
12483
}
12484
 
12485
/* If a SUBREG has the promoted bit set, it is in fact a property of the
12486
   register present in the SUBREG, so for each such SUBREG go back and
12487
   adjust nonzero and sign bit information of the registers that are
12488
   known to have some zero/sign bits set.
12489
 
12490
   This is needed because when combine blows the SUBREGs away, the
12491
   information on zero/sign bits is lost and further combines can be
12492
   missed because of that.  */
12493
 
12494
static void
12495
record_promoted_value (rtx insn, rtx subreg)
12496
{
12497
  struct insn_link *links;
12498
  rtx set;
12499
  unsigned int regno = REGNO (SUBREG_REG (subreg));
12500
  enum machine_mode mode = GET_MODE (subreg);
12501
 
12502
  if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
12503
    return;
12504
 
12505
  for (links = LOG_LINKS (insn); links;)
12506
    {
12507
      reg_stat_type *rsp;
12508
 
12509
      insn = links->insn;
12510
      set = single_set (insn);
12511
 
12512
      if (! set || !REG_P (SET_DEST (set))
12513
          || REGNO (SET_DEST (set)) != regno
12514
          || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
12515
        {
12516
          links = links->next;
12517
          continue;
12518
        }
12519
 
12520
      rsp = VEC_index (reg_stat_type, reg_stat, regno);
12521
      if (rsp->last_set == insn)
12522
        {
12523
          if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
12524
            rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
12525
        }
12526
 
12527
      if (REG_P (SET_SRC (set)))
12528
        {
12529
          regno = REGNO (SET_SRC (set));
12530
          links = LOG_LINKS (insn);
12531
        }
12532
      else
12533
        break;
12534
    }
12535
}
12536
 
12537
/* Check if X, a register, is known to contain a value already
12538
   truncated to MODE.  In this case we can use a subreg to refer to
12539
   the truncated value even though in the generic case we would need
12540
   an explicit truncation.  */
12541
 
12542
static bool
12543
reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
12544
{
12545
  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
12546
  enum machine_mode truncated = rsp->truncated_to_mode;
12547
 
12548
  if (truncated == 0
12549
      || rsp->truncation_label < label_tick_ebb_start)
12550
    return false;
12551
  if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
12552
    return true;
12553
  if (TRULY_NOOP_TRUNCATION_MODES_P (mode, truncated))
12554
    return true;
12555
  return false;
12556
}
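/* --- Illustrative aside (not part of combine.c) ----------------------
   A standalone sketch of the property being tracked above: once a
   value is known to be already truncated to a narrower mode, a
   further truncation is a no-op, so a subreg-style access to the low
   bits is as good as an explicit TRUNCATE.  Illustrative only;
   guarded by #if 0.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int v;

  for (v = -100000; v <= 100000; v += 17)
    {
      /* WIDE is known to hold a value already truncated to 16 bits.  */
      int32_t wide = (int16_t) v;

      assert ((int16_t) wide == wide);
    }
  return 0;
}
#endif
/* --- End of aside. -------------------------------------------------- */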
12557
 
12558
/* Callback for for_each_rtx.  If *P is a hard reg or a subreg record the mode
12559
   that the register is accessed in.  For non-TRULY_NOOP_TRUNCATION targets we
12560
   might be able to turn a truncate into a subreg using this information.
12561
   Return -1 if traversing *P is complete or 0 otherwise.  */
12562
 
12563
static int
12564
record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
12565
{
12566
  rtx x = *p;
12567
  enum machine_mode truncated_mode;
12568
  reg_stat_type *rsp;
12569
 
12570
  if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
12571
    {
12572
      enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
12573
      truncated_mode = GET_MODE (x);
12574
 
12575
      if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
12576
        return -1;
12577
 
12578
      if (TRULY_NOOP_TRUNCATION_MODES_P (truncated_mode, original_mode))
12579
        return -1;
12580
 
12581
      x = SUBREG_REG (x);
12582
    }
12583
  /* ??? For hard-regs we now record everything.  We might be able to
12584
     optimize this using last_set_mode.  */
12585
  else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
12586
    truncated_mode = GET_MODE (x);
12587
  else
12588
    return 0;
12589
 
12590
  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
12591
  if (rsp->truncated_to_mode == 0
12592
      || rsp->truncation_label < label_tick_ebb_start
12593
      || (GET_MODE_SIZE (truncated_mode)
12594
          < GET_MODE_SIZE (rsp->truncated_to_mode)))
12595
    {
12596
      rsp->truncated_to_mode = truncated_mode;
12597
      rsp->truncation_label = label_tick;
12598
    }
12599
 
12600
  return -1;
12601
}
12602
 
12603
/* Callback for note_uses.  Find hardregs and subregs of pseudos and
12604
   the modes they are used in.  This can help turn TRUNCATEs into
12605
   SUBREGs.  */
12606
 
12607
static void
12608
record_truncated_values (rtx *x, void *data ATTRIBUTE_UNUSED)
12609
{
12610
  for_each_rtx (x, record_truncated_value, NULL);
12611
}
12612
 
12613
/* Scan X for promoted SUBREGs.  For each one found,
12614
   note what it implies to the registers used in it.  */
12615
 
12616
static void
12617
check_promoted_subreg (rtx insn, rtx x)
12618
{
12619
  if (GET_CODE (x) == SUBREG
12620
      && SUBREG_PROMOTED_VAR_P (x)
12621
      && REG_P (SUBREG_REG (x)))
12622
    record_promoted_value (insn, x);
12623
  else
12624
    {
12625
      const char *format = GET_RTX_FORMAT (GET_CODE (x));
12626
      int i, j;
12627
 
12628
      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
12629
        switch (format[i])
12630
          {
12631
          case 'e':
12632
            check_promoted_subreg (insn, XEXP (x, i));
12633
            break;
12634
          case 'V':
12635
          case 'E':
12636
            if (XVEC (x, i) != 0)
12637
              for (j = 0; j < XVECLEN (x, i); j++)
12638
                check_promoted_subreg (insn, XVECEXP (x, i, j));
12639
            break;
12640
          }
12641
    }
12642
}
12643
 
12644
/* Verify that all the registers and memory references mentioned in *LOC are
12645
   still valid.  *LOC was part of a value set in INSN when label_tick was
12646
   equal to TICK.  Return 0 if some are not.  If REPLACE is nonzero, replace
12647
   the invalid references with (clobber (const_int 0)) and return 1.  This
12648
   replacement is useful because we often can get useful information about
12649
   the form of a value (e.g., if it was produced by a shift that always
12650
   produces -1 or 0) even though we don't know exactly what registers it
12651
   was produced from.  */
12652
 
12653
static int
12654
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
12655
{
12656
  rtx x = *loc;
12657
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
12658
  int len = GET_RTX_LENGTH (GET_CODE (x));
12659
  int i, j;
12660
 
12661
  if (REG_P (x))
12662
    {
12663
      unsigned int regno = REGNO (x);
12664
      unsigned int endregno = END_REGNO (x);
12665
      unsigned int j;
12666
 
12667
      for (j = regno; j < endregno; j++)
12668
        {
12669
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
12670
          if (rsp->last_set_invalid
12671
              /* If this is a pseudo-register that was only set once and not
12672
                 live at the beginning of the function, it is always valid.  */
12673
              || (! (regno >= FIRST_PSEUDO_REGISTER
12674
                     && REG_N_SETS (regno) == 1
12675
                     && (!REGNO_REG_SET_P
12676
                         (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
12677
                  && rsp->last_set_label > tick))
12678
          {
12679
            if (replace)
12680
              *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
12681
            return replace;
12682
          }
12683
        }
12684
 
12685
      return 1;
12686
    }
12687
  /* If this is a memory reference, make sure that there were no stores after
12688
     it that might have clobbered the value.  We don't have alias info, so we
12689
     assume any store invalidates it.  Moreover, we only have local UIDs, so
12690
     we also assume that there were stores in the intervening basic blocks.  */
12691
  else if (MEM_P (x) && !MEM_READONLY_P (x)
12692
           && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
12693
    {
12694
      if (replace)
12695
        *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
12696
      return replace;
12697
    }
12698
 
12699
  for (i = 0; i < len; i++)
12700
    {
12701
      if (fmt[i] == 'e')
12702
        {
12703
          /* Check for identical subexpressions.  If x contains
12704
             identical subexpressions we only have to traverse one of
12705
             them.  */
12706
          if (i == 1 && ARITHMETIC_P (x))
12707
            {
12708
              /* Note that at this point x0 has already been checked
12709
                 and found valid.  */
12710
              rtx x0 = XEXP (x, 0);
12711
              rtx x1 = XEXP (x, 1);
12712
 
12713
              /* If x0 and x1 are identical then x is also valid.  */
12714
              if (x0 == x1)
12715
                return 1;
12716
 
12717
              /* If x1 is identical to a subexpression of x0 then
12718
                 while checking x0, x1 has already been checked.  Thus
12719
                 it is valid and so is x.  */
12720
              if (ARITHMETIC_P (x0)
12721
                  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
12722
                return 1;
12723
 
12724
              /* If x0 is identical to a subexpression of x1 then x is
12725
                 valid iff the rest of x1 is valid.  */
12726
              if (ARITHMETIC_P (x1)
12727
                  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
12728
                return
12729
                  get_last_value_validate (&XEXP (x1,
12730
                                                  x0 == XEXP (x1, 0) ? 1 : 0),
12731
                                           insn, tick, replace);
12732
            }
12733
 
12734
          if (get_last_value_validate (&XEXP (x, i), insn, tick,
12735
                                       replace) == 0)
12736
            return 0;
12737
        }
12738
      else if (fmt[i] == 'E')
12739
        for (j = 0; j < XVECLEN (x, i); j++)
12740
          if (get_last_value_validate (&XVECEXP (x, i, j),
12741
                                       insn, tick, replace) == 0)
12742
            return 0;
12743
    }
12744
 
12745
  /* If we haven't found a reason for it to be invalid, it is valid.  */
12746
  return 1;
12747
}
12748
 
12749
/* Get the last value assigned to X, if known.  Some registers
12750
   in the value may be replaced with (clobber (const_int 0)) if their value
12751
   is no longer known reliably.  */
12752
 
12753
static rtx
12754
get_last_value (const_rtx x)
12755
{
12756
  unsigned int regno;
12757
  rtx value;
12758
  reg_stat_type *rsp;
12759
 
12760
  /* If this is a non-paradoxical SUBREG, get the value of its operand and
12761
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
12762
     we cannot predict what values the "extra" bits might have.  */
12763
  if (GET_CODE (x) == SUBREG
12764
      && subreg_lowpart_p (x)
12765
      && !paradoxical_subreg_p (x)
12766
      && (value = get_last_value (SUBREG_REG (x))) != 0)
12767
    return gen_lowpart (GET_MODE (x), value);
12768
 
12769
  if (!REG_P (x))
12770
    return 0;
12771
 
12772
  regno = REGNO (x);
12773
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
12774
  value = rsp->last_set_value;
12775
 
12776
  /* If we don't have a value, or if it isn't for this basic block and
12777
     it's either a hard register, set more than once, or it's live
12778
     at the beginning of the function, return 0.
12779
 
12780
     Because if it's not live at the beginning of the function then the reg
12781
     is always set before being used (is never used without being set).
12782
     And, if it's set only once, and it's always set before use, then all
12783
     uses must have the same last value, even if it's not from this basic
12784
     block.  */
12785
 
12786
  if (value == 0
12787
      || (rsp->last_set_label < label_tick_ebb_start
12788
          && (regno < FIRST_PSEUDO_REGISTER
12789
              || REG_N_SETS (regno) != 1
12790
              || REGNO_REG_SET_P
12791
                 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
12792
    return 0;
12793
 
12794
  /* If the value was set in a later insn than the ones we are processing,
12795
     we can't use it even if the register was only set once.  */
12796
  if (rsp->last_set_label == label_tick
12797
      && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
12798
    return 0;
12799
 
12800
  /* If the value has all its registers valid, return it.  */
12801
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
12802
    return value;
12803
 
12804
  /* Otherwise, make a copy and replace any invalid register with
12805
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */
12806
 
12807
  value = copy_rtx (value);
12808
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
12809
    return value;
12810
 
12811
  return 0;
12812
}
12813
 
12814
/* Return nonzero if expression X refers to a REG or to memory
12815
   that is set in an instruction more recent than FROM_LUID.  */
12816
 
12817
static int
12818
use_crosses_set_p (const_rtx x, int from_luid)
12819
{
12820
  const char *fmt;
12821
  int i;
12822
  enum rtx_code code = GET_CODE (x);
12823
 
12824
  if (code == REG)
12825
    {
12826
      unsigned int regno = REGNO (x);
12827
      unsigned endreg = END_REGNO (x);
12828
 
12829
#ifdef PUSH_ROUNDING
12830
      /* Don't allow uses of the stack pointer to be moved,
12831
         because we don't know whether the move crosses a push insn.  */
12832
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
12833
        return 1;
12834
#endif
12835
      for (; regno < endreg; regno++)
12836
        {
12837
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
12838
          if (rsp->last_set
12839
              && rsp->last_set_label == label_tick
12840
              && DF_INSN_LUID (rsp->last_set) > from_luid)
12841
            return 1;
12842
        }
12843
      return 0;
12844
    }
12845
 
12846
  if (code == MEM && mem_last_set > from_luid)
12847
    return 1;
12848
 
12849
  fmt = GET_RTX_FORMAT (code);
12850
 
12851
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12852
    {
12853
      if (fmt[i] == 'E')
12854
        {
12855
          int j;
12856
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12857
            if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
12858
              return 1;
12859
        }
12860
      else if (fmt[i] == 'e'
12861
               && use_crosses_set_p (XEXP (x, i), from_luid))
12862
        return 1;
12863
    }
12864
  return 0;
12865
}
12866
 
12867
/* Define three variables used for communication between the following
12868
   routines.  */
12869
 
12870
static unsigned int reg_dead_regno, reg_dead_endregno;
12871
static int reg_dead_flag;
12872
 
12873
/* Function called via note_stores from reg_dead_at_p.
12874
 
12875
   If DEST is within [reg_dead_regno, reg_dead_endregno), set
12876
   reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET.  */
12877
 
12878
static void
12879
reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
12880
{
12881
  unsigned int regno, endregno;
12882
 
12883
  if (!REG_P (dest))
12884
    return;
12885
 
12886
  regno = REGNO (dest);
12887
  endregno = END_REGNO (dest);
12888
  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12889
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12890
}
12891
 
12892
/* Return nonzero if REG is known to be dead at INSN.
12893
 
12894
   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
12895
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
12896
   live.  Otherwise, see if it is live or dead at the start of the basic
12897
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
12898
   must be assumed to be always live.  */
12899
 
12900
static int
12901
reg_dead_at_p (rtx reg, rtx insn)
12902
{
12903
  basic_block block;
12904
  unsigned int i;
12905
 
12906
  /* Set variables for reg_dead_at_p_1.  */
12907
  reg_dead_regno = REGNO (reg);
12908
  reg_dead_endregno = END_REGNO (reg);
12909
 
12910
  reg_dead_flag = 0;
12911
 
12912
  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
12913
     we allow the machine description to decide whether use-and-clobber
12914
     patterns are OK.  */
12915
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12916
    {
12917
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12918
        if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
12919
          return 0;
12920
    }
12921
 
12922
  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
12923
     beginning of basic block.  */
12924
  block = BLOCK_FOR_INSN (insn);
12925
  for (;;)
12926
    {
12927
      if (INSN_P (insn))
12928
        {
12929
          note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12930
          if (reg_dead_flag)
12931
            return reg_dead_flag == 1 ? 1 : 0;
12932
 
12933
          if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12934
            return 1;
12935
        }
12936
 
12937
      if (insn == BB_HEAD (block))
12938
        break;
12939
 
12940
      insn = PREV_INSN (insn);
12941
    }
12942
 
12943
  /* Look at live-in sets for the basic block that we were in.  */
12944
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12945
    if (REGNO_REG_SET_P (df_get_live_in (block), i))
12946
      return 0;
12947
 
12948
  return 1;
12949
}
12950
 
12951
/* Note hard registers in X that are used.  */
12952
 
12953
static void
12954
mark_used_regs_combine (rtx x)
12955
{
12956
  RTX_CODE code = GET_CODE (x);
12957
  unsigned int regno;
12958
  int i;
12959
 
12960
  switch (code)
12961
    {
12962
    case LABEL_REF:
12963
    case SYMBOL_REF:
12964
    case CONST_INT:
12965
    case CONST:
12966
    case CONST_DOUBLE:
12967
    case CONST_VECTOR:
12968
    case PC:
12969
    case ADDR_VEC:
12970
    case ADDR_DIFF_VEC:
12971
    case ASM_INPUT:
12972
#ifdef HAVE_cc0
12973
    /* CC0 must die in the insn after it is set, so we don't need to take
12974
       special note of it here.  */
12975
    case CC0:
12976
#endif
12977
      return;
12978
 
12979
    case CLOBBER:
12980
      /* If we are clobbering a MEM, mark any hard registers inside the
12981
         address as used.  */
12982
      if (MEM_P (XEXP (x, 0)))
12983
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12984
      return;
12985
 
12986
    case REG:
12987
      regno = REGNO (x);
12988
      /* A hard reg in a wide mode may really be multiple registers.
12989
         If so, mark all of them just like the first.  */
12990
      if (regno < FIRST_PSEUDO_REGISTER)
12991
        {
12992
          /* None of this applies to the stack, frame or arg pointers.  */
12993
          if (regno == STACK_POINTER_REGNUM
12994
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
12995
              || regno == HARD_FRAME_POINTER_REGNUM
12996
#endif
12997
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12998
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12999
#endif
13000
              || regno == FRAME_POINTER_REGNUM)
13001
            return;
13002
 
13003
          add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
13004
        }
13005
      return;
13006
 
13007
    case SET:
13008
      {
13009
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
13010
           the address.  */
13011
        rtx testreg = SET_DEST (x);
13012
 
13013
        while (GET_CODE (testreg) == SUBREG
13014
               || GET_CODE (testreg) == ZERO_EXTRACT
13015
               || GET_CODE (testreg) == STRICT_LOW_PART)
13016
          testreg = XEXP (testreg, 0);
13017
 
13018
        if (MEM_P (testreg))
13019
          mark_used_regs_combine (XEXP (testreg, 0));
13020
 
13021
        mark_used_regs_combine (SET_SRC (x));
13022
      }
13023
      return;
13024
 
13025
    default:
13026
      break;
13027
    }
13028
 
13029
  /* Recursively scan the operands of this expression.  */
13030
 
13031
  {
13032
    const char *fmt = GET_RTX_FORMAT (code);
13033
 
13034
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
13035
      {
13036
        if (fmt[i] == 'e')
13037
          mark_used_regs_combine (XEXP (x, i));
13038
        else if (fmt[i] == 'E')
13039
          {
13040
            int j;
13041
 
13042
            for (j = 0; j < XVECLEN (x, i); j++)
13043
              mark_used_regs_combine (XVECEXP (x, i, j));
13044
          }
13045
      }
13046
  }
13047
}
13048
 
13049
/* Remove register number REGNO from the dead registers list of INSN.
13050
 
13051
   Return the note used to record the death, if there was one.  */
13052
 
13053
rtx
13054
remove_death (unsigned int regno, rtx insn)
13055
{
13056
  rtx note = find_regno_note (insn, REG_DEAD, regno);
13057
 
13058
  if (note)
13059
    remove_note (insn, note);
13060
 
13061
  return note;
13062
}
13063
 
13064
/* For each register (hardware or pseudo) used within expression X, if its
13065
   death is in an instruction with luid between FROM_LUID (inclusive) and
13066
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
13067
   list headed by PNOTES.
13068
 
13069
   That said, don't move registers killed by maybe_kill_insn.
13070
 
13071
   This is done when X is being merged by combination into TO_INSN.  These
13072
   notes will then be distributed as needed.  */
13073
 
13074
static void
13075
move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
13076
             rtx *pnotes)
13077
{
13078
  const char *fmt;
13079
  int len, i;
13080
  enum rtx_code code = GET_CODE (x);
13081
 
13082
  if (code == REG)
13083
    {
13084
      unsigned int regno = REGNO (x);
13085
      rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;
13086
 
13087
      /* Don't move the register if it gets killed in between from and to.  */
13088
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
13089
          && ! reg_referenced_p (x, maybe_kill_insn))
13090
        return;
13091
 
13092
      if (where_dead
13093
          && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
13094
          && DF_INSN_LUID (where_dead) >= from_luid
13095
          && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
13096
        {
13097
          rtx note = remove_death (regno, where_dead);
13098
 
13099
          /* It is possible for the call above to return 0.  This can occur
13100
             when last_death points to I2 or I1 that we combined with.
13101
             In that case make a new note.
13102
 
13103
             We must also check for the case where X is a hard register
13104
             and NOTE is a death note for a range of hard registers
13105
             including X.  In that case, we must put REG_DEAD notes for
13106
             the remaining registers in place of NOTE.  */
13107
 
13108
          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
13109
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
13110
                  > GET_MODE_SIZE (GET_MODE (x))))
13111
            {
13112
              unsigned int deadregno = REGNO (XEXP (note, 0));
13113
              unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
13114
              unsigned int ourend = END_HARD_REGNO (x);
13115
              unsigned int i;
13116
 
13117
              for (i = deadregno; i < deadend; i++)
13118
                if (i < regno || i >= ourend)
13119
                  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
13120
            }
13121
 
13122
          /* If we didn't find any note, or if we found a REG_DEAD note that
13123
             covers only part of the given reg, and we have a multi-reg hard
13124
             register, then to be safe we must check for REG_DEAD notes
13125
             for each register other than the first.  They could have
13126
             their own REG_DEAD notes lying around.  */
13127
          else if ((note == 0
13128
                    || (note != 0
13129
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
13130
                            < GET_MODE_SIZE (GET_MODE (x)))))
13131
                   && regno < FIRST_PSEUDO_REGISTER
13132
                   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
13133
            {
13134
              unsigned int ourend = END_HARD_REGNO (x);
13135
              unsigned int i, offset;
13136
              rtx oldnotes = 0;
13137
 
13138
              if (note)
13139
                offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
13140
              else
13141
                offset = 1;
13142
 
13143
              for (i = regno + offset; i < ourend; i++)
13144
                move_deaths (regno_reg_rtx[i],
13145
                             maybe_kill_insn, from_luid, to_insn, &oldnotes);
13146
            }
13147
 
13148
          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
13149
            {
13150
              XEXP (note, 1) = *pnotes;
13151
              *pnotes = note;
13152
            }
13153
          else
13154
            *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
13155
        }
13156
 
13157
      return;
13158
    }
13159
 
13160
  else if (GET_CODE (x) == SET)
13161
    {
13162
      rtx dest = SET_DEST (x);
13163
 
13164
      move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
13165
 
13166
      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
13167
         that accesses one word of a multi-word item, some
13168
         piece of every register in the expression is used by
13169
         this insn, so remove any old death.  */
13170
      /* ??? So why do we test for equality of the sizes?  */
13171
 
      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
    }
}

/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */
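/* For instance, with a hypothetical insn body
     (set (zero_extract:SI (reg:SI 3) (const_int 8) (const_int 0))
          (reg:SI 4))
   only a bit-field of register 3 is written, so this returns 1 when X
   is (reg:SI 3).  */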
 
static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (!REG_P (target))
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = end_hard_regno (GET_MODE (target), tregno);
      endregno = end_hard_regno (GET_MODE (x), regno);

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}

/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   of insns including FROM (I2 may be zero).

   ELIM_I2, ELIM_I1 and ELIM_I0 are either zero or registers that we know
   will not need REG_DEAD notes because they are being substituted for.
   This saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */
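/* For illustration, NOTES is an EXPR_LIST chain such as (with
   hypothetical registers)
     (expr_list:REG_DEAD (reg:SI 65)
        (expr_list:REG_UNUSED (reg:SI 66)
           (nil)))
   each element of which is placed or dropped individually below.  */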
 
static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
                  rtx elim_i1, rtx elim_i0)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_NON_LOCAL_GOTO:
          if (JUMP_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && JUMP_P (i2));
              place = i2;
            }
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (CALL_P (i3))
            place = i3;
          else if (i2 && CALL_P (i2))
            place = i2;
          else
            {
              gcc_assert (cfun->can_throw_non_call_exceptions);
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          break;

        case REG_ARGS_SIZE:
          /* ??? How to distribute this among i3-i1.  Assume i3 contains
             the entire adjustment.  Assert that i3 contains at least some
             adjustment.  */
          if (!noop_move_p (i3))
            {
              int old_size, args_size = INTVAL (XEXP (note, 0));
              /* fixup_args_size_notes looks at REG_NORETURN note,
                 so ensure the note is placed there first.  */
              if (CALL_P (i3))
                {
                  rtx *np;
                  for (np = &next_note; *np; np = &XEXP (*np, 1))
                    if (REG_NOTE_KIND (*np) == REG_NORETURN)
                      {
                        rtx n = *np;
                        *np = XEXP (n, 1);
                        XEXP (n, 1) = REG_NOTES (i3);
                        REG_NOTES (i3) = n;
                        break;
                      }
                }
              old_size = fixup_args_size_notes (PREV_INSN (i3), i3, args_size);
              /* For !ACCUMULATE_OUTGOING_ARGS, emit_call_1 adds a
                 REG_ARGS_SIZE note to every noreturn call, so allow
                 that here.  */
              gcc_assert (old_size != args_size
                          || (CALL_P (i3)
                              && !ACCUMULATE_OUTGOING_ARGS
                              && find_reg_note (i3, REG_NORETURN, NULL_RTX)));
            }
          break;
        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (CALL_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && CALL_P (i2));
              place = i2;
            }
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if the note is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (REG_P (XEXP (note, 0))
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (REG_P (XEXP (note, 0))
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;
 
        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about the results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL_TARGET:
        case REG_LABEL_OPERAND:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
             as a JUMP_LABEL or decrement LABEL_NUSES if it's already
             there.  */
          if (place && JUMP_P (place)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place) == NULL
                  || JUMP_LABEL (place) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place);

              if (!label)
                JUMP_LABEL (place) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
            }

          if (place2 && JUMP_P (place2)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place2) == NULL
                  || JUMP_LABEL (place2) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place2);

              if (!label)
                JUMP_LABEL (place2) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;
        case REG_DEAD:
          /* If we replaced the right hand side of FROM_INSN with a
             REG_EQUAL note, the original use of the dying register
             will not have been combined into I3 and I2.  In such cases,
             FROM_INSN is guaranteed to be the first of the combined
             instructions, so we simply need to search back before
             FROM_INSN for the previous use or set of this register,
             then alter the notes there appropriately.

             If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && from_insn == i2mod
              && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
            tem = from_insn;
          else
            {
              if (from_insn
                  && CALL_P (from_insn)
                  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
                place = from_insn;
              else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
                place = i3;
              else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
                       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                place = i2;
              else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
                        && !(i2mod
                             && reg_overlap_mentioned_p (XEXP (note, 0),
                                                         i2mod_old_rhs)))
                       || rtx_equal_p (XEXP (note, 0), elim_i1)
                       || rtx_equal_p (XEXP (note, 0), elim_i0))
                break;
              tem = i3;
            }

          if (place == 0)
            {
              basic_block bb = this_basic_block;

              for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
                {
                  if (!NONDEBUG_INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX,
                                            NULL_RTX, NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          SET_INSN_DELETED (tem);
                          if (tem == i2)
                            i2 = NULL_RTX;

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              SET_INSN_DELETED (cc0_setter);
                              if (cc0_setter == i2)
                                i2 = NULL_RTX;
                            }
#endif
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  Do not place a REG_DEAD note, even if
                             the register is also used here; that would not
                             match the algorithm used in lifetime analysis
                             and can cause the consistency check in the
                             scheduler to fail.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (CALL_P (tem)
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
                          && from_insn
                          && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          struct insn_link *links = LOG_LINKS (place);
                          LOG_LINKS (place) = NULL;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }

            }
          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here if it is set at all, not if it is totally replaced,
             which is what `dead_or_set_p' checks, so also check for it being
             set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));
              reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     last_death.  [I no longer understand why this is
                     being done.] */
                  if (rsp->last_death != place)
                    rsp->last_death = 0;
                  place = 0;
                }
              else
                rsp->last_death = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */
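              /* For example, on a hypothetical target where a DImode value
                 occupies hard regs 2 and 3, if PLACE only uses reg 2, a
                 separate REG_DEAD note for reg 3 must be distributed to
                 whatever earlier insn last used it.  */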
 
              if (place && regno < FIRST_PSEUDO_REGISTER
                  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
                {
                  unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += hard_regno_nregs[i][reg_raw_mode[i]])
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note = alloc_reg_note (REG_DEAD, piece,
                                                             NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX, NULL_RTX, NULL_RTX,
                                                NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (!NONDEBUG_INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      break;
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    add_reg_note (tem, REG_UNUSED, piece);
                                    break;
                                  }
                              }

                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          gcc_unreachable ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }

      if (place2)
        add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0));
    }
}

/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */
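/* For example (hypothetical insns): if I2 carried a link back to an insn
   that set (reg:SI 65) and I2 has been merged into I3, that link must be
   moved to the next insn after the setter that still uses the register.  */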
 
static void
distribute_links (struct insn_link *links)
{
  struct insn_link *link, *next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = link->next;

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only way that could happen is if
         we replace I3, I2, and I1 by I3 and I2.  But in that case the
         destination of I2 also remains unchanged.  */
 
      if (NOTE_P (link->insn)
          || (set = single_set (link->insn)) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */
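      /* For instance, if LINK points at a hypothetical insn 20 that sets
         (reg:SI 65) and insn 23 is the first subsequent insn mentioning
         that register, the link belongs on insn 23.  */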
 
      for (insn = NEXT_INSN (link->insn);
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (DEBUG_INSN_P (insn))
          continue;
        else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (CALL_P (insn)
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          struct insn_link *link2;

          FOR_EACH_LOG_LINK (link2, place)
            if (link2->insn == link->insn)
              break;

          if (link2 == NULL)
            {
              link->next = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
                added_links_insn = place;
            }
        }
    }
}

/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */
 
static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */
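/* For example (hypothetical pseudos): if EXPR is
     (plus:SI (reg:SI 65) (reg:SI 66))
   then (plus:SI (reg:SI 65) (const_int 4)) mentions nothing new and this
   returns false, while (reg:SI 70) would make it return true.  */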
 
static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}

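/* Print the accumulated combiner statistics (attempts, substitutions
   and successes) to FILE.  */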
void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

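/* Likewise, but for the totals accumulated over the entire
   compilation.  */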
void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}

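/* The combine pass is run whenever we are optimizing.  */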
static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static unsigned int
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine;

  df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
  df_note_add_problem ();
  df_analyze ();

  regstat_init_n_sets_and_refs ();

  rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }

  regstat_free_n_sets_and_refs ();
  return 0;
}

struct rtl_opt_pass pass_combine =
{
 {
  RTL_PASS,
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};
