/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKS never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS do not contain links for uses of CC0.  They don't
   need to, because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insns' values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
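
/* A concrete illustration (hypothetical insns; register numbers and insn
   UIDs are invented for this sketch).  Given a pair of linked insns

       (insn 10 ... (set (reg 100) (ashift (reg 101) (const_int 2))))
       (insn 11 ... (set (reg 102) (plus (reg 100) (reg 103))))

   where (reg 100) dies in insn 11, combine substitutes the source of
   insn 10 into insn 11, producing

       (insn 11 ... (set (reg 102)
                         (plus (ashift (reg 101) (const_int 2))
                               (reg 103))))

   If the target's machine description recognizes the combined pattern
   (e.g. a shift-and-add instruction), the new insn replaces insn 11
   and insn 10 is deleted.  */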

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "cgraph.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;

typedef struct reg_stat_struct {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx                           last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx                           last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx                           last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int                           last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int                           last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT        last_set_nonzero_bits;
  char                          last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode)   last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char                          last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char                 sign_bit_copies;

  unsigned HOST_WIDE_INT        nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int                           truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode, we might be able to save an explicit
     truncation if we know that the value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode)   truncated_to_mode : 8;
} reg_stat_type;
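
/* For intuition (hypothetical example; the register number is invented):
   if (reg 100) was last set by
   (set (reg:SI 100) (zero_extend:SI (mem:QI ...))), then nonzero_bits
   for the register is 0xff.  A later (and:SI (reg 100) (const_int 255))
   is therefore redundant and can be simplified to just (reg 100), which
   is exactly the kind of fact these fields exist to prove.  */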

DEF_VEC_O(reg_stat_type);
DEF_VEC_ALLOC_O(reg_stat_type,heap);

static VEC(reg_stat_type,heap) *reg_stat;

/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;


/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as an INSN_LIST rtx.  */

static rtx *uid_log_links;

#define INSN_COST(INSN)         (uid_insn_cost[INSN_UID (INSN)])
#define LOG_LINKS(INSN)         (uid_log_links[INSN_UID (INSN)])

/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each extended basic block in scanning order.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while they are being computed and after
   combine has completed.  Keeping it zero during computation prevents
   propagating values based on previously set values, which can be
   incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;


/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE };

struct undo
{
  struct undo *next;
  enum undo_kind kind;
  union { rtx r; int i; enum machine_mode m; } old_contents;
  union { rtx *r; int *i; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (const_rtx);
static int use_crosses_set_p (const_rtx, int);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static int record_truncated_value (rtx *, void *);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);


/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;


/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN cannot be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx
combine_split_insns (rtx pattern, rtx insn)
{
  rtx ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > VEC_length (reg_stat_type, reg_stat))
    VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
  return ret;
}
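
/* Illustrative note (register number invented): if the splitter turns
   PATTERN into a sequence that creates a fresh pseudo, say (reg 300)
   with regno 300 beyond the current length of reg_stat, the grow above
   ensures reg_stat[300] exists and is zeroed, i.e. nothing is assumed
   known about the new register.  */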

/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need only check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == NULL)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return NULL;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == NULL)
                result = this_result;
              else if (this_result)
                return NULL;
            }
        }
    }

  return result;
}


/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  basic_block bb;
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (!REG_P (dest))
    return 0;

  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
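
/* Hypothetical example (insn UIDs and register numbers invented): if
   insn 5 sets (reg 200), insn 6 in the same block has a LOG_LINK back
   to insn 5, mentions (reg 200) exactly once inside
   (plus (reg 200) (reg 201)), and (reg 200) dies there, then
   find_single_use on (reg 200) and insn 5 returns a pointer to the
   location holding that PLUS and sets *PLOC to insn 6.  */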

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && CONST_INT_P (newval))
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && CONST_INT_P (SUBREG_REG (oldval))));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && CONST_INT_P (XEXP (oldval, 0))));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)     do_SUBST(&(INTO), (NEWVAL))
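
/* Illustrative usage (a sketch, not code from this file): during
   try_combine a caller tentatively rewrites an operand with

       SUBST (XEXP (x, 0), new_rtx);

   and, if the resulting pattern fails to match the machine description,
   calls undo_all () to restore every location recorded in undobuf; on
   success, undo_commit () discards the undo records instead.  */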

/* Similar to SUBST, but NEWVAL is an int expression.  Note that
   substituting the value of a HOST_WIDE_INT (including CONST_INT) this
   way is not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))

/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
   insn_rtx_cost than the original instruction sequence I1, I2, I3 and
   undobuf.other_insn.  Note that I1 and/or NEWI2PAT may be NULL_RTX.
   NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX.  This
   function returns false if the costs of all instructions can be
   estimated and the replacements are more expensive than the original
   sequence.  */

static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat,
                       rtx newotherpat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Look up the original insn_rtx_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat, optimize_this_for_speed_p);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat, optimize_this_for_speed_p);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      new_other_cost = insn_rtx_cost (newotherpat, optimize_this_for_speed_p);
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old_cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    INSN_COST (i1) = 0;

  return true;
}
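
/* Worked example (numbers invented for illustration): combining two
   insns with INSN_COST 4 each gives old_cost = 8.  If the single
   replacement pattern costs 12, new_cost > old_cost and the combination
   is rejected; at 8 or less it is allowed and the uid_insn_cost entries
   are updated.  A cost of 0 means "unknown", which is why the comparison
   is only made when both totals are positive.  */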


/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
        {
          next = NEXT_INSN (insn);
          if (INSN_P (insn) && noop_move_p (insn))
            {
              if (dump_file)
                fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));

              delete_insn_and_edges (insn);
            }
        }
    }
}
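
/* For illustration: a move like (set (reg:SI 100) (reg:SI 100)), which
   combine's substitutions can leave behind, satisfies noop_move_p and
   is removed here via delete_insn_and_edges.  */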


/* Fill in the log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx *next_use, insn;
  df_ref *def_vec, *use_vec;

  next_use = XCNEWVEC (rtx, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when a def is encountered.
     Note that we do not clear the next_use array in order to save time,
     so we have to test whether the use is in the same basic block as
     the def.

     There are a few cases below when we do not consider the definition
     or usage -- these are taken over from what the original flow.c did.
     Don't ask me why it is done this way; I don't know, and if it works,
     I don't want to know.  */

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

          /* Log links are created only once.  */
          gcc_assert (!LOG_LINKS (insn));

          for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
            {
              df_ref def = *def_vec;
              int regno = DF_REF_REGNO (def);
              rtx use_insn;

              if (!next_use[regno])
                continue;

              /* Do not consider the def if it is a pre/post modification
                 in a MEM.  */
              if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
                continue;

              /* Do not make the log link for the frame pointer.  */
              if ((regno == FRAME_POINTER_REGNUM
                   && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
                  || (regno == HARD_FRAME_POINTER_REGNUM
                      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                  || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
                  )
                continue;

              use_insn = next_use[regno];
              if (BLOCK_FOR_INSN (use_insn) == bb)
                {
                  /* flow.c claimed:

                     We don't build a LOG_LINK for hard registers contained
                     in ASM_OPERANDs.  If these registers get replaced,
                     we might wind up changing the semantics of the insn,
                     even if reload can make what appear to be valid
                     assignments later.  */
                  if (regno >= FIRST_PSEUDO_REGISTER
                      || asm_noperands (PATTERN (use_insn)) < 0)
                    {
                      /* Don't add duplicate links between instructions.  */
                      rtx links;
                      for (links = LOG_LINKS (use_insn); links;
                           links = XEXP (links, 1))
                        if (insn == XEXP (links, 0))
                          break;

                      if (!links)
                        LOG_LINKS (use_insn) =
                          alloc_INSN_LIST (insn, LOG_LINKS (use_insn));
                    }
                }
              next_use[regno] = NULL_RTX;
            }

          for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
            {
              df_ref use = *use_vec;
              int regno = DF_REF_REGNO (use);

              /* Do not consider the usage of the stack pointer
                 by a function call.  */
              if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
                continue;

              next_use[regno] = insn;
            }
        }
    }

  free (next_use);
}
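
/* For intuition (hypothetical insns, UIDs invented): after this pass,
   if insn 7 is the most recent setter of (reg 100) and insn 9 uses
   (reg 100) in the same basic block with no intervening set, then
   LOG_LINKS (insn 9) is an INSN_LIST containing insn 7.  try_combine
   later walks exactly these links when picking candidate insn pairs.  */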

/* Clear LOG_LINKS fields of insns.  */

static void
clear_log_links (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      free_INSN_LIST_list (&LOG_LINKS (insn));
}

/* Main entry point for the combiner.  F is the first insn of the
   function.  NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  rtx links, nextlinks;
  rtx first;
  basic_block last_bb;

  int new_direct_jump_p = 0;

  for (first = f; first && !INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (rtx, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;
  label_tick = label_tick_ebb_start = 1;

  /* Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  setup_incoming_promotions (first);
  /* Allow the entry block and the first block to fall into the same EBB.
     Conceptually the incoming promotions are assigned to the entry block.  */
  last_bb = ENTRY_BLOCK_PTR;

  create_log_links ();
  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      FOR_BB_INSNS (this_basic_block, insn)
        if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
          {
            subst_low_luid = DF_INSN_LUID (insn);
            subst_insn = insn;

            note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                         insn);
            record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
            for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
              if (REG_NOTE_KIND (links) == REG_INC)
                set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                  insn);
#endif

            /* Record the current insn_rtx_cost of this instruction.  */
            if (NONJUMP_INSN_P (insn))
              INSN_COST (insn) = insn_rtx_cost (PATTERN (insn),
                                                optimize_this_for_speed_p);
            if (dump_file)
              fprintf (dump_file, "insn_cost %d: %d\n",
                       INSN_UID (insn), INSN_COST (insn));
          }
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */
  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);
  last_bb = ENTRY_BLOCK_PTR;

  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      rtl_profile_for_bb (this_basic_block);
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;
          if (NONDEBUG_INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* See if we can find hard registers and subregs of pseudos
                 in narrower modes.  This could help turn TRUNCATEs
                 into SUBREGs.  */
              note_uses (&PATTERN (insn), record_truncated_values, NULL);

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may already have been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      i2mod = temp;
                      i2mod_old_rhs = copy_rtx (orig);
                      i2mod_new_rhs = copy_rtx (note);
                      next = try_combine (insn, i2mod, NULL_RTX,
                                          &new_direct_jump_p);
                      i2mod = NULL_RTX;
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }

  default_rtl_profile ();
  clear_log_links ();
  clear_bb_flags ();
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  /* Clean up.  */
  free (uid_log_links);
  free (uid_insn_cost);
  VEC_free (reg_stat_type, heap, reg_stat);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  reg_stat_type *p;

  for (i = 0; VEC_iterate (reg_stat_type, reg_stat, i, p); ++i)
    memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}
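
/* Note on the memset above: it relies on the field layout of
   reg_stat_type.  Zeroing offsetof (reg_stat_type, sign_bit_copies)
   bytes clears exactly the fields declared before sign_bit_copies (the
   last_death, last_set and last_set_* fields), while preserving
   sign_bit_copies, nonzero_bits, truncation_label and truncated_to_mode
   across the second scan.  */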

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (rtx first)
{
  tree arg;
  bool strictly_local = false;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    {
      rtx x, reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      enum machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
        continue;

      /* Determine, if possible, whether all call sites of the current
         function lie within the current compilation unit.  (This does
         take into account the exporting of a function via taking its
         address, and so forth.)  */
      strictly_local = cgraph_local_info (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
         (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
         TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
         after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
      mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
                                     TREE_TYPE (cfun->decl), 0);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when:
         (a) A mode promotion has occurred;  */
      if (mode1 == mode3)
        continue;
      /* (b) The mode of the register is the same as the mode of
             the argument as it is passed; */
      if (mode3 != mode4)
        continue;
      /* (c) There's no language level extension;  */
      if (mode1 == mode2)
        ;
      /* (c.1) All callers are from the current compilation unit.  If that's
         the case, we don't have to rely on an ABI; we only have to know
         what we're generating right now, and we know that we will do the
         mode1 to mode2 promotion with the given sign.  */
      else if (!strictly_local)
        continue;
      /* (c.2) The combination of the two promotions is useful.  This is
         true when the signs match, or if the first promotion is unsigned.
         In the latter case, (sign_extend (zero_extend x)) is the same as
         (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
      else if (uns1)
        uns3 = true;
      else if (uns3)
        continue;

      /* Record that the value was promoted from mode1 to mode3,
         so that any sign extension at the head of the current
         function may be eliminated.  */
      x = gen_rtx_CLOBBER (mode1, const0_rtx);
      x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
      record_value_for_reg (reg, first, x);
    }
}
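
/* Illustrative scenario (target and modes invented for this sketch): on
   a machine whose ABI widens a QImode argument to SImode with sign
   extension, the loop above records the argument register as having the
   form (sign_extend:SI (clobber:QI (const_int 0))).  If the function
   body then begins with an explicit QImode-to-SImode sign extension of
   that register, combine can prove the extension redundant and remove
   it.  */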

/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
           (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));

      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
          return;
        }

      /* If this register is being initialized using itself, and the
         register is uninitialized in this basic block, and there are
         no LOG_LINKS which set the register, then part of the
         register is uninitialized.  In that case we can't assume
         anything about the number of nonzero bits.

         ??? We could do better if we checked this in
         reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
         could avoid making assumptions about the insn which initially
         sets the register, while still using the information in other
         insns.  We would have to be careful to check every insn
         involved in the combination.  */

      if (insn
          && reg_referenced_p (x, PATTERN (insn))
          && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
                               REGNO (x)))
        {
          rtx link;

          for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
            {
              if (dead_or_set_p (XEXP (link, 0), x))
                break;
            }
          if (!link)
            {
              rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
              rsp->sign_bit_copies = 1;
              return;
            }
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.
1502
             ??? For 2.5, try to tighten up the MD files in this regard
1503
             instead of this kludge.  */
1504
 
1505
          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
1506
              && CONST_INT_P (src)
1507
              && INTVAL (src) > 0
1508
              && 0 != (INTVAL (src)
1509
                       & ((HOST_WIDE_INT) 1
1510
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
1511
            src = GEN_INT (INTVAL (src)
1512
                           | ((HOST_WIDE_INT) (-1)
1513
                              << GET_MODE_BITSIZE (GET_MODE (x))));
1514
#endif
1515
 
1516
          /* Don't call nonzero_bits if it cannot change anything.  */
1517
          if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
1518
            rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
1519
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1520
          if (rsp->sign_bit_copies == 0
1521
              || rsp->sign_bit_copies > num)
1522
            rsp->sign_bit_copies = num;
1523
        }
1524
      else
1525
        {
1526
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1527
          rsp->sign_bit_copies = 1;
1528
        }
1529
    }
1530
}
1531
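
/* For illustration (hypothetical insn; register numbers are made up),
   after seeing

        (set (reg:SI 77) (and:SI (reg:SI 76) (const_int 255)))

   the nonzero_bits recorded for register 77 would have 0xff ORed in.
   Each set ORs in the bits its source may produce, so the recorded
   value is the union over all sets of the register; that is why a
   CLOBBER or an unanalyzable partial set falls back to the full
   GET_MODE_MASK above.  */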
 
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
               rtx *pdest, rtx *psrc)
{
  int i;
  const_rtx set = 0;
  rtx src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          switch (GET_CODE (elt))
            {
            /* This is important to combine floating point insns
               for the SH4 port.  */
            case USE:
              /* Combining an isolated USE doesn't make sense.
                 We depend here on combinable_i3pat to reject them.  */
              /* The code below this loop only verifies that the inputs of
                 the SET in INSN do not change.  We call reg_set_between_p
                 to verify that the REG in the USE does not change between
                 I3 and INSN.
                 If the USE in INSN was for a pseudo register, the matching
                 insn pattern will likely match any register; combining this
                 with any other USE would only be safe if we knew that the
                 used registers have identical values, or if there was
                 something to tell them apart, e.g. different modes.  For
                 now, we forgo such complicated tests and simply disallow
                 combining of USEs of pseudo registers with any other USE.  */
              if (REG_P (XEXP (elt, 0))
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
                {
                  rtx i3pat = PATTERN (i3);
                  int i = XVECLEN (i3pat, 0) - 1;
                  unsigned int regno = REGNO (XEXP (elt, 0));

                  do
                    {
                      rtx i3elt = XVECEXP (i3pat, 0, i);

                      if (GET_CODE (i3elt) == USE
                          && REG_P (XEXP (i3elt, 0))
                          && (REGNO (XEXP (i3elt, 0)) == regno
                              ? reg_set_between_p (XEXP (elt, 0),
                                                   PREV_INSN (insn), i3)
                              : regno >= FIRST_PSEUDO_REGISTER))
                        return 0;
                    }
                  while (--i >= 0);
                }
              break;

              /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && insn_nothrow_p (insn)
                  && !side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
          && (find_reg_fusage (i3, USE, dest)
              || (REG_P (dest)
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
                  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't substitute into a non-local goto; this confuses the CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; last_set of that register might point to
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
         equivalent to the memory so the substitution is valid even if there
         are intervening stores.  Also, don't move a volatile asm or
         UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
          && (((!MEM_P (src)
                || ! find_reg_note (insn, REG_EQUIV, src))
               && use_crosses_set_p (src, DF_INSN_LUID (insn)))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
              || GET_CODE (src) == UNSPEC_VOLATILE))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that as a special case.  */
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (REG_P (src)
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
              /* Don't extend the life of a hard register unless it is
                 a user variable (if we have few registers) or it can't
                 fit into the desired register (meaning something special
                 is going on).
                 Also avoid substituting a return register into I3, because
                 reload can't handle a conflict with constraints of other
                 inputs.  */
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;


  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
        {
          /* Don't substitute for a register intended as a clobberable
             operand.  */
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
          if (rtx_equal_p (reg, dest))
            return 0;

          /* If the clobber represents an earlyclobber operand, we must not
             substitute an expression containing the clobbered register.
             As we do not analyze the constraint strings here, we have to
             make the conservative assumption.  However, if the register is
             a fixed hard reg, the clobber cannot represent any operand;
             we leave it up to the machine description to either accept or
             reject use-and-clobber patterns.  */
          if (!REG_P (reg)
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
              || !fixed_regs[REGNO (reg)])
            if (reg_overlap_mentioned_p (reg, src))
              return 0;
        }

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
        return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
          return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (JUMP_P (i3)
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || (pred != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
            || (succ != NULL_RTX
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
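
/* For illustration (hypothetical insns; register numbers are made up),
   given

        INSN:  (set (reg:SI 80) (plus:SI (reg:SI 79) (const_int 4)))
        I3:    (set (mem:SI (reg:SI 80)) (const_int 0))

   the checks above pass; *PDEST is set to (reg:SI 80) and *PSRC to the
   PLUS expression, which try_combine can then substitute for reg 80
   inside I3's address.  */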
 
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We check that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

         (set (reg:DI 101) (reg:DI 100))
         (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
                    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
                  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
         above.  We don't want to prevent pseudos from being combined
         into the address of a MEM, so only prevent the combination if
         i1 or i2 set the same MEM.  */
      if ((inner_dest != dest &&
           (!MEM_P (inner_dest)
            || rtx_equal_p (i2dest, inner_dest)
            || (i1dest && rtx_equal_p (i1dest, inner_dest)))
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

          /* This is the same test done in can_combine_p except we can't test
             all_adjacent; we don't have to, since this instruction will stay
             in place, thus we are not considering increasing the lifetime of
             INNER_DEST.

             Also, if this insn sets a function argument, combining it with
             something that might need a spill could clobber a previous
             function argument; the all_adjacent test in can_combine_p also
             checks this; here, we do a more specific test for this case.  */

          || (REG_P (inner_dest)
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn, so
         record that for later.  We have to consider paradoxical
         subregs here, since they kill the whole register, but we
         ignore partial subregs, STRICT_LOW_PART, etc.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (subdest))
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
        subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
          && REG_P (subdest)
          && reg_referenced_p (subdest, PATTERN (i3))
          && REGNO (subdest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (subdest) != ARG_POINTER_REGNUM
              || ! fixed_regs [REGNO (subdest)])
#endif
          && REGNO (subdest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = subdest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}
 
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (CONST_INT_P (XEXP (x, 1))
                && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      if (BINARY_P (x))
        return contains_muldiv (XEXP (x, 0))
            || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
        return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}
 
1990
/* Determine whether INSN can be used in a combination.  Return nonzero if
1991
   not.  This is used in try_combine to detect early some cases where we
1992
   can't perform combinations.  */
1993
 
1994
static int
1995
cant_combine_insn_p (rtx insn)
1996
{
1997
  rtx set;
1998
  rtx src, dest;
1999
 
2000
  /* If this isn't really an insn, we can't do anything.
2001
     This can occur when flow deletes an insn that it has merged into an
2002
     auto-increment address.  */
2003
  if (! INSN_P (insn))
2004
    return 1;
2005
 
2006
  /* Never combine loads and stores involving hard regs that are likely
2007
     to be spilled.  The register allocator can usually handle such
2008
     reg-reg moves by tying.  If we allow the combiner to make
2009
     substitutions of likely-spilled regs, reload might die.
2010
     As an exception, we allow combinations involving fixed regs; these are
2011
     not available to the register allocator so there's no risk involved.  */
2012
 
2013
  set = single_set (insn);
2014
  if (! set)
2015
    return 0;
2016
  src = SET_SRC (set);
2017
  dest = SET_DEST (set);
2018
  if (GET_CODE (src) == SUBREG)
2019
    src = SUBREG_REG (src);
2020
  if (GET_CODE (dest) == SUBREG)
2021
    dest = SUBREG_REG (dest);
2022
  if (REG_P (src) && REG_P (dest)
2023
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
2024
           && ! fixed_regs[REGNO (src)]
2025
           && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
2026
          || (REGNO (dest) < FIRST_PSEUDO_REGISTER
2027
              && ! fixed_regs[REGNO (dest)]
2028
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
2029
    return 1;
2030
 
2031
  return 0;
2032
}
2033
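
/* For example, on a hypothetical target where the class containing hard
   register 0 is likely to be spilled, a copy such as

        (set (reg:SI 60) (reg:SI 0))

   is rejected here, so the move survives for the register allocator to
   tie instead of being substituted away by combine.  */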
 
struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
{
  struct likely_spilled_retval_info *const info =
    (struct likely_spilled_retval_info *) data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
  new_mask = (2U << (nregs - 1)) - 1;
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}
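
/* A note on the mask arithmetic above: (2U << (nregs - 1)) - 1 computes
   the same NREGS-wide mask as (1U << nregs) - 1 would, but remains well
   defined in C when nregs equals the width of unsigned int; for
   nregs == 32 it yields 0xffffffff, whereas shifting 1U by 32 bits would
   be undefined behavior.  The same idiom appears in
   likely_spilled_retval_p below.  */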
 
/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */

static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs--;
  do
    {
      if ((mask & 1 << nregs)
          && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
        return 1;
    } while (nregs--);
  return 0;
}
 
/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  /* For notes, be conservative and simply remove them.  */
  remove_reg_equal_equiv_notes (insn);

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));

  df_insn_rescan (insn);
}
 
/* Return TRUE if combine can reuse reg X in mode MODE.
   ADDED_SETS is nonzero if the original set is still required.  */
static bool
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
{
  unsigned int regno;

  if (!REG_P (x))
    return false;

  regno = REGNO (x);
  /* Allow hard registers if the new mode is legal, and occupies no more
     registers than the old mode.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    return (HARD_REGNO_MODE_OK (regno, mode)
            && (hard_regno_nregs[regno][GET_MODE (x)]
                >= hard_regno_nregs[regno][mode]));

  /* Or a pseudo that is only used once.  */
  return (REG_N_SETS (regno) == 1 && !added_sets
          && !REG_USERVAR_P (x));
}
 
 
/* Check whether X, the destination of a set, refers to part of
   the register specified by REG.  */

static bool
reg_subword_p (rtx x, rtx reg)
{
  /* Check that reg is an integer mode register.  */
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT)
    x = XEXP (x, 0);

  return GET_CODE (x) == SUBREG
         && SUBREG_REG (x) == reg
         && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}
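
/* For illustration (hypothetical), reg_subword_p returns true for a
   destination such as

        (strict_low_part (subreg:HI (reg:SI 77) 0))

   with REG being (reg:SI 77), but false whenever REG has a
   floating-point mode, since only integer modes are handled here.  */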
 
#ifdef AUTO_INC_DEC
/* Replace auto-increment addressing modes with explicit operations to
   access the same addresses without modifying the corresponding
   registers.  If AFTER holds, SRC is meant to be reused after the
   side effect; otherwise it is to be reused before that.  */

static rtx
cleanup_auto_inc_dec (rtx src, bool after, enum machine_mode mem_mode)
{
  rtx x = src;
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct
         values.  */
      return x;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return x;
      break;

    case CONST:
      if (shared_const_p (x))
        return x;
      break;

    case MEM:
      mem_mode = GET_MODE (x);
      break;

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
      gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode);
      if (after == (code == PRE_INC || code == PRE_DEC))
        x = cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode);
      else
        x = gen_rtx_PLUS (GET_MODE (x),
                          cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode),
                          GEN_INT ((code == PRE_INC || code == POST_INC)
                                   ? GET_MODE_SIZE (mem_mode)
                                   : -GET_MODE_SIZE (mem_mode)));
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      if (after == (code == PRE_MODIFY))
        x = XEXP (x, 0);
      else
        x = XEXP (x, 1);
      return cleanup_auto_inc_dec (x, after, mem_mode);

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  x = shallow_copy_rtx (x);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (x, used) = 0;

  /* We do not copy FRAME_RELATED for INSNs.  */
  if (INSN_P (x))
    RTX_FLAG (x, frame_related) = 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), after, mem_mode);
    else if (fmt[i] == 'E' || fmt[i] == 'V')
      {
        int j;
        XVEC (x, i) = rtvec_alloc (XVECLEN (x, i));
        for (j = 0; j < XVECLEN (x, i); j++)
          XVECEXP (x, i, j)
            = cleanup_auto_inc_dec (XVECEXP (src, i, j), after, mem_mode);
      }

  return x;
}
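
/* For example (hypothetical, assuming 4-byte SImode), when AFTER is
   false

        (mem:SI (post_inc:SI (reg:SI 100)))

   is rewritten to (mem:SI (reg:SI 100)), because a POST_INC access uses
   the pre-increment address, while

        (mem:SI (pre_inc:SI (reg:SI 100)))

   becomes (mem:SI (plus:SI (reg:SI 100) (const_int 4))).  */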
 
/* Auxiliary data structure for propagate_for_debug.  */

struct rtx_subst_pair
{
  rtx to;
  bool adjusted;
  bool after;
};

/* DATA points to an rtx_subst_pair.  Return the value that should be
   substituted.  */

static rtx
propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
{
  struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;

  if (!rtx_equal_p (from, old_rtx))
    return NULL_RTX;
  if (!pair->adjusted)
    {
      pair->adjusted = true;
      pair->to = cleanup_auto_inc_dec (pair->to, pair->after, VOIDmode);
      return pair->to;
    }
  return copy_rtx (pair->to);
}
#endif
 
/* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN
   and LAST.  If MOVE holds, debug insns must also be moved past
   LAST.  */

static void
propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src, bool move)
{
  rtx next, move_pos = move ? last : NULL_RTX, loc;

#ifdef AUTO_INC_DEC
  struct rtx_subst_pair p;
  p.to = src;
  p.adjusted = false;
  p.after = move;
#endif

  next = NEXT_INSN (insn);
  while (next != last)
    {
      insn = next;
      next = NEXT_INSN (insn);
      if (DEBUG_INSN_P (insn))
        {
#ifdef AUTO_INC_DEC
          loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
                                         dest, propagate_for_debug_subst, &p);
#else
          loc = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), dest, src);
#endif
          if (loc == INSN_VAR_LOCATION_LOC (insn))
            continue;
          INSN_VAR_LOCATION_LOC (insn) = loc;
          if (move_pos)
            {
              remove_insn (insn);
              PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX;
              move_pos = emit_debug_insn_after (insn, move_pos);
            }
          else
            df_insn_rescan (insn);
        }
    }
}
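
/* For illustration (hypothetical), if combine eliminates

        (set (reg:SI 80) (plus:SI (reg:SI 79) (const_int 1)))

   then any DEBUG_INSN between INSN and LAST whose VAR_LOCATION mentions
   reg 80 has the PLUS expression substituted for it, so the variable's
   location information survives the combination.  */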
 
/* Delete the unconditional jump INSN and adjust the CFG correspondingly.
   Note that the INSN should be deleted *after* removing dead edges, so
   that the kept edge is the fallthrough edge for a (set (pc) (pc))
   but not for a (set (pc) (label_ref FOO)).  */

static void
update_cfg_for_uncondjump (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  bool at_end = (BB_END (bb) == insn);

  if (at_end)
    purge_dead_edges (bb);

  delete_insn (insn);
  if (at_end && EDGE_COUNT (bb->succs) == 1)
    single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
}
 
 
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains the dest of I3 if it is used in I3's source, which means
     that the old life of that register is being killed.  If that usage is
     placed into I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0;
  /* Set if I2DEST was reused as a scratch register.  */
  bool i2scratch = false;
  /* PATTERN (I1) and PATTERN (I2), or copies of them in certain cases.  */
  rtx i1pat = 0, i2pat = 0;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i2dest_killed = 0, i1dest_killed = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Set if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Set if I1, I2 or I3 contains a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;
  int changed_i3_dest = 0;

  int maxreg;
  rtx temp;
  rtx link;
  rtx other_pat = 0;
  rtx new_other_notes;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || likely_spilled_retval_p (i3))
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (i1)
        fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
                 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
      else
        fprintf (dump_file, "\nTrying %d -> %d:\n",
                 INSN_UID (i2), INSN_UID (i3));
    }

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBERs or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_active_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
               || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
              && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_luid = DF_INSN_LUID (i2);

              added_sets_2 = added_sets_1 = 0;
              i2src = SET_DEST (PATTERN (i3));
              i2dest = SET_SRC (PATTERN (i3));
              i2dest_killed = dead_or_set_p (i2, i2dest);

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }
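
/* For illustration (hypothetical insns; register numbers are made up),
   the special case above matches

        I2: (parallel [(set (reg:SI 90) (div:SI (reg:SI 88) (reg:SI 89)))
                       (set (reg:SI 91) (mod:SI (reg:SI 88) (reg:SI 89)))])
        I3: (set (mem:SI (reg:SI 70)) (reg:SI 91))

   with reg 91 dying in I3; the MEM replaces reg 91 as the output of
   I2's second SET, avoiding the register-to-memory copy.  */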
 
  /* If I2 is setting a pseudo to a constant and I3 is setting some
     sub-part of it to another constant, merge them by making a new
     constant.  */
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (CONST_INT_P (SET_SRC (temp))
          || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && GET_CODE (PATTERN (i3)) == SET
      && (CONST_INT_P (SET_SRC (PATTERN (i3)))
          || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
    {
      rtx dest = SET_DEST (PATTERN (i3));
      int offset = -1;
      int width = 0;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        {
          if (CONST_INT_P (XEXP (dest, 1))
              && CONST_INT_P (XEXP (dest, 2)))
            {
              width = INTVAL (XEXP (dest, 1));
              offset = INTVAL (XEXP (dest, 2));
              dest = XEXP (dest, 0);
              if (BITS_BIG_ENDIAN)
                offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
            }
        }
      else
        {
          if (GET_CODE (dest) == STRICT_LOW_PART)
            dest = XEXP (dest, 0);
          width = GET_MODE_BITSIZE (GET_MODE (dest));
          offset = 0;
        }

      if (offset >= 0)
        {
          /* If this is the low part, we're done.  */
          if (subreg_lowpart_p (dest))
            ;
          /* Handle the case where inner is twice the size of outer.  */
          else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
                   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
            offset += GET_MODE_BITSIZE (GET_MODE (dest));
          /* Otherwise give up for now.  */
          else
            offset = -1;
        }

      if (offset >= 0
          && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
              <= HOST_BITS_PER_WIDE_INT * 2))
        {
          HOST_WIDE_INT mhi, ohi, ihi;
          HOST_WIDE_INT mlo, olo, ilo;
          rtx inner = SET_SRC (PATTERN (i3));
          rtx outer = SET_SRC (temp);

          if (CONST_INT_P (outer))
            {
              olo = INTVAL (outer);
              ohi = olo < 0 ? -1 : 0;
            }
          else
            {
              olo = CONST_DOUBLE_LOW (outer);
              ohi = CONST_DOUBLE_HIGH (outer);
            }

          if (CONST_INT_P (inner))
            {
              ilo = INTVAL (inner);
              ihi = ilo < 0 ? -1 : 0;
            }
          else
            {
              ilo = CONST_DOUBLE_LOW (inner);
              ihi = CONST_DOUBLE_HIGH (inner);
            }

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
              mhi = 0;
            }
          else if (width < HOST_BITS_PER_WIDE_INT * 2)
            {
              mhi = ((unsigned HOST_WIDE_INT) 1
                     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
              mlo = -1;
            }
          else
            {
              mlo = -1;
              mhi = -1;
            }

          ilo &= mlo;
          ihi &= mhi;

          if (offset >= HOST_BITS_PER_WIDE_INT)
            {
              mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
              mlo = 0;
              ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
              ilo = 0;
            }
          else if (offset > 0)
            {
              mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
              mlo = mlo << offset;
              ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
              ilo = ilo << offset;
            }

          olo = (olo & ~mlo) | ilo;
          ohi = (ohi & ~mhi) | ihi;

          combine_merges++;
          subst_insn = i3;
          subst_low_luid = DF_INSN_LUID (i2);
          added_sets_2 = added_sets_1 = 0;
          i2dest = SET_DEST (temp);
          i2dest_killed = dead_or_set_p (i2, i2dest);

          /* Replace the source in I2 with the new constant and make the
             resulting insn the new pattern for I3.  Then skip to where we
             validate the pattern.  Everything was set up above.  */
          SUBST (SET_SRC (temp),
                 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));

          newpat = PATTERN (i2);

          /* The dest of I3 has been replaced with the dest of I2.  */
          changed_i3_dest = 1;
          goto validate_replacement;
        }
    }
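
/* For illustration (hypothetical insns, assuming 32-bit
   HOST_WIDE_INT), the block above merges

        I2: (set (reg:DI 100) (const_int 0))
        I3: (set (subreg:SI (reg:DI 100) 0) (const_int 5))

   by masking I3's constant into the low 32 bits of I2's, giving a
   single (set (reg:DI 100) (const_int 5)) to validate.  */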
 
#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same DF_INSN_LUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                             BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
                             XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif
 
  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && REG_P (SET_SRC (PATTERN (i3)))
        && MEM_P (SET_DEST (PATTERN (i3)))
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
              || (i1 != 0
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
        {
          undo_all ();
          return 0;
        }
#endif
 
2795
  /* See if the SETs in I1 or I2 need to be kept around in the merged
2796
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
 
     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */
 
  added_sets_2 = ! dead_or_set_p (i3, i2dest);
 
  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
 
  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */
 
  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
        i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
      else
        i2pat = copy_rtx (PATTERN (i2));
    }
 
  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
        i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
      else
        i1pat = copy_rtx (PATTERN (i1));
    }
 
  combine_merges++;
 
  /* Substitute in the latest insn for the regs set by the earlier ones.  */
 
  maxreg = max_reg_num ();
 
  subst_insn = i3;
 
#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.   Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
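  /* E.g. such a compare-and-operate insn might look like
        (parallel [(set (reg 17) (compare (plus (reg 1) (reg 2))
                                          (const_int 0)))
                   (set (reg 3) (plus (reg 1) (reg 2)))])
     where the register numbers are purely illustrative.  */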
 
  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef SELECT_CC_MODE
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif
 
      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);
 
      i2_is_used = 1;
 
#ifdef SELECT_CC_MODE
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          if (can_change_dest_mode(SET_DEST (newpat), added_sets_2,
                                   compare_mode))
            {
              unsigned int regno = REGNO (SET_DEST (newpat));
              rtx new_dest;
 
              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }
 
              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      /* It is possible that the source of I2 or I1 may be performing
         an unneeded operation, such as a ZERO_EXTEND of something
         that is known to have the high part zero.  Handle that case
         by letting subst look at the innermost one of them.
 
         Another way to do this would be to have a function that tries
         to simplify a single insn instead of merging two or more
         insns.  We don't do this because of the potential of infinite
         loops and because of the potential extra memory required.
         However, doing it the way we are is a bit of a kludge and
         doesn't catch all cases.
 
         But only do this if -fexpensive-optimizations since it slows
         things down and doesn't usually win.
 
         This is not done in the COMPARE case above because the
         unmodified I2PAT is used in the PARALLEL and so a pattern
         with a modified I2SRC would not match.  */
 
      if (flag_expensive_optimizations)
        {
          /* Pass pc_rtx so no substitutions are done, just
             simplifications.  */
          if (i1)
            {
              subst_low_luid = DF_INSN_LUID (i1);
              i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
            }
          else
            {
              subst_low_luid = DF_INSN_LUID (i2);
              i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
            }
        }
 
      n_occurrences = 0;         /* `subst' counts here */
 
      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */
 
      subst_low_luid = DF_INSN_LUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;
 
      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }
 
  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */
 
  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Check that an autoincrement side-effect on I1 has not been lost.
         This happens if I1DEST is mentioned in I2 and dies there, and
         has disappeared from the new pattern.  */
      if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
           && !i1_feeds_i3
           && dead_or_set_p (i2, i1dest)
           && !reg_overlap_mentioned_p (i1dest, newpat))
          /* Before we can do this substitution, we must redo the test done
             above (see detailed comments there) that ensures that I1DEST
             isn't mentioned in any SETs in NEWPAT that are field assignments.  */
          || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, 0, 0))
        {
          undo_all ();
          return 0;
        }
 
      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }
 
  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }
 
  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */
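  /* For illustration (the register letters are made up): if I3 was
     (set A (plus B C)) and I2 was (set B D) with B still live after I3,
     NEWPAT is (set A (plus D C)) and the result built here is
     (parallel [(set A (plus D C)) (set B D)]).  */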
 
  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;
 
      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
                  sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }
 
      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets) = i1pat;
 
      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }
 
 validate_replacement:
 
  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);
 
  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);
 
      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
        RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }
 
  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
 
  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and isn't
     marked as an instruction that might trap in an EH region.  In that case,
     we just need the first SET.   This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.
 
     It's pointless doing this if we originally had two sets, one from
     i3, and one from i2.  Combining then splitting the parallel results
     in the original i2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.
 
     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */
 
  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);
 
      if (((REG_P (SET_DEST (set1))
            && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
           || (GET_CODE (SET_DEST (set1)) == SUBREG
               && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
          && insn_nothrow_p (i3)
          && !side_effects_p (SET_SRC (set1)))
        {
          newpat = set0;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
 
      else if (((REG_P (SET_DEST (set0))
                 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
                || (GET_CODE (SET_DEST (set0)) == SUBREG
                    && find_reg_note (i3, REG_UNUSED,
                                      SUBREG_REG (SET_DEST (set0)))))
               && insn_nothrow_p (i3)
               && !side_effects_p (SET_SRC (set0)))
        {
          newpat = set1;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
 
          if (insn_code_number >= 0)
            changed_i3_dest = 1;
        }
    }
 
  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */
 
  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx parallel, m_split, *split;
 
      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */
 
      m_split = combine_split_insns (newpat, i3);
 
      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */
 
      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */
 
      if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
        {
          enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
 
          /* First try to split using the original register as a
             scratch register.  */
          parallel = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (2, newpat,
                                                  gen_rtx_CLOBBER (VOIDmode,
                                                                   i2dest)));
          m_split = combine_split_insns (parallel, i3);
 
          /* If that didn't work, try changing the mode of I2DEST if
             we can.  */
          if (m_split == 0
              && new_mode != GET_MODE (i2dest)
              && new_mode != VOIDmode
              && can_change_dest_mode (i2dest, added_sets_2, new_mode))
            {
              enum machine_mode old_mode = GET_MODE (i2dest);
              rtx ni2dest;
 
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
                  ni2dest = regno_reg_rtx[REGNO (i2dest)];
                }
 
              parallel = (gen_rtx_PARALLEL
                          (VOIDmode,
                           gen_rtvec (2, newpat,
                                      gen_rtx_CLOBBER (VOIDmode,
                                                       ni2dest))));
              m_split = combine_split_insns (parallel, i3);
 
              if (m_split == 0
                  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                {
                  struct undo *buf;
 
                  adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
                  buf = undobuf.undos;
                  undobuf.undos = buf->next;
                  buf->next = undobuf.frees;
                  undobuf.frees = buf;
                }
            }
 
          i2scratch = m_split != 0;
        }
 
      /* If recog_for_combine has discarded clobbers, try to use them
         again for the split.  */
      if (m_split == 0 && newpat_vec_with_clobbers)
        {
          parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
          m_split = combine_split_insns (parallel, i3);
        }
 
      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
        {
          m_split = PATTERN (m_split);
          insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = m_split;
        }
      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
               && (next_real_insn (i2) == i3
                   || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (NEXT_INSN (m_split));
          newi2pat = PATTERN (m_split);
 
          i3set = single_set (NEXT_INSN (m_split));
          i2set = single_set (m_split);
 
          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
 
          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */
 
          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;
 
          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */
 
          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);
 
              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);
 
              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);
 
              if (REG_P (new_i3_dest)
                  && REG_P (new_i2_dest)
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                INC_REG_N_SETS (REGNO (new_i2_dest), 1);
            }
        }
 
      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && REG_P (i2dest)
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.
             Make sure we don't change a hard register to have a mode that
             isn't valid for it, or change the number of registers.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || can_change_dest_mode (i2dest, added_sets_2,
                                       GET_MODE (*split)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);
          bool subst_done = false;
          newi2pat = NULL_RTX;
 
          i2scratch = true;
 
          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
                  newdest = regno_reg_rtx[REGNO (i2dest)];
                }
            }
 
          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
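          /* E.g. (mult FOO (const_int 8)) becomes
             (ashift FOO (const_int 3)).  */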
          if (split_code == MULT
              && CONST_INT_P (XEXP (*split, 1))
              && INTVAL (XEXP (*split, 1)) > 0
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_ASHIFT (split_mode,
                                             XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }
 
#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
            {
#ifdef LOAD_EXTEND_OP
              /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
                 what it really is.  */
              if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
                  == SIGN_EXTEND)
                SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
              else
#endif
                SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
            }
#endif
 
          /* Attempt to split binary operators using arithmetic identities.  */
          if (BINARY_P (SET_SRC (newpat))
              && split_mode == GET_MODE (SET_SRC (newpat))
              && ! side_effects_p (SET_SRC (newpat)))
            {
              rtx setsrc = SET_SRC (newpat);
              enum machine_mode mode = GET_MODE (setsrc);
              enum rtx_code code = GET_CODE (setsrc);
              rtx src_op0 = XEXP (setsrc, 0);
              rtx src_op1 = XEXP (setsrc, 1);
 
              /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
              if (rtx_equal_p (src_op0, src_op1))
                {
                  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
                  SUBST (XEXP (setsrc, 0), newdest);
                  SUBST (XEXP (setsrc, 1), newdest);
                  subst_done = true;
                }
              /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
              else if ((code == PLUS || code == MULT)
                       && GET_CODE (src_op0) == code
                       && GET_CODE (XEXP (src_op0, 0)) == code
                       && (INTEGRAL_MODE_P (mode)
                           || (FLOAT_MODE_P (mode)
                               && flag_unsafe_math_optimizations)))
                {
                  rtx p = XEXP (XEXP (src_op0, 0), 0);
                  rtx q = XEXP (XEXP (src_op0, 0), 1);
                  rtx r = XEXP (src_op0, 1);
                  rtx s = src_op1;
 
                  /* Split both "((X op Y) op X) op Y" and
                     "((X op Y) op Y) op X" as "T op T" where T is
                     "X op Y".  */
                  if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
                       || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
                    {
                      newi2pat = gen_rtx_SET (VOIDmode, newdest,
                                              XEXP (src_op0, 0));
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                  /* Split "((X op X) op Y) op Y)" as "T op T" where
                     T is "X op Y".  */
                  else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
                    {
                      rtx tmp = simplify_gen_binary (code, mode, p, r);
                      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                }
            }
 
          if (!subst_done)
            {
              newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
              SUBST (*split, newdest);
            }
 
          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
 
          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                {
                  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                  if (reg_overlap_mentioned_p (reg, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
                }
 
          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }
 
  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.
 
     We cannot do this if the destination of the first assignment is a
     condition code register or cc0.  We eliminate this case by making sure
     the SET_DEST and SET_SRC have the same mode.
 
     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */
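  /* E.g. (parallel [(set (reg 1) (sign_extend (mem ADDR)))
                     (set (reg 2) (mem ADDR))])
     becomes the load (set (reg 1) (sign_extend (mem ADDR))) followed by
     a copy of the low part of (reg 1) into (reg 2), typically via a
     SUBREG; the register numbers here are illustrative.  */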
 
  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
               == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (REG_P (temp)
                  && VEC_index (reg_stat_type, reg_stat,
                                REGNO (temp))->nonzero_bits != 0
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (VEC_index (reg_stat_type, reg_stat,
                                 REGNO (temp))->nonzero_bits
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (REG_P (temp)
                      && VEC_index (reg_stat_type, reg_stat,
                                    REGNO (temp))->nonzero_bits != 0
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (VEC_index (reg_stat_type, reg_stat,
                                     REGNO (temp))->nonzero_bits
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;
 
      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
 
      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
 
      if (insn_code_number >= 0)
        swap_i2i3 = 1;
    }
 
  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */
 
  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1))
           && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
                 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
#ifdef HAVE_cc0
           /* We cannot split the parallel into two sets if both sets
              reference cc0.  */
           && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
                 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
#endif
           )
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }
 
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
 
      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }
 
  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }
 
  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      CLEAR_HARD_REG_SET (newpat_used_regs);
 
      other_pat = PATTERN (undobuf.other_insn);
      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);
 
      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }
    }
 
#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
     they are adjacent to each other or not.  */
  {
    rtx p = prev_nonnote_insn (i3);
    if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
        && sets_cc0_p (newi2pat))
      {
        undo_all ();
        return 0;
      }
  }
#endif
 
  /* Only allow this combination if insn_rtx_costs reports that the
     replacement instructions are cheaper than the originals.  */
  if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat, other_pat))
    {
      undo_all ();
      return 0;
    }
 
  if (MAY_HAVE_DEBUG_INSNS)
    {
      struct undo *undo;
 
      for (undo = undobuf.undos; undo; undo = undo->next)
        if (undo->kind == UNDO_MODE)
          {
            rtx reg = *undo->where.r;
            enum machine_mode new_mode = GET_MODE (reg);
            enum machine_mode old_mode = undo->old_contents.m;
 
            /* Temporarily revert mode back.  */
            adjust_reg_mode (reg, old_mode);
 
            if (reg == i2dest && i2scratch)
              {
                /* If we used i2dest as a scratch register with a
                   different mode, substitute it for the original
                   i2src while its original mode is temporarily
                   restored, and then clear i2scratch so that we don't
                   do it again later.  */
                propagate_for_debug (i2, i3, reg, i2src, false);
                i2scratch = false;
                /* Put back the new mode.  */
                adjust_reg_mode (reg, new_mode);
              }
            else
              {
                rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
                rtx first, last;
 
                if (reg == i2dest)
                  {
                    first = i2;
                    last = i3;
                  }
                else
                  {
                    first = i3;
                    last = undobuf.other_insn;
                    gcc_assert (last);
                  }
 
                /* We're dealing with a reg that changed mode but not
                   meaning, so we want to turn it into a subreg for
                   the new mode.  However, because of REG sharing and
                   because its mode had already changed, we have to do
                   it in two steps.  First, replace any debug uses of
                   reg, with its original mode temporarily restored,
                   with this copy we have created; then, replace the
                   copy with the SUBREG of the original shared reg,
                   once again changed to the new mode.  */
                propagate_for_debug (first, last, reg, tempreg, false);
                adjust_reg_mode (reg, new_mode);
                propagate_for_debug (first, last, tempreg,
                                     lowpart_subreg (old_mode, reg, new_mode),
                                     false);
              }
          }
    }
 
  /* If we will be able to accept this, we have made a
     change to the destination of I3.  This requires us to
     do a few adjustments.  */
 
  if (changed_i3_dest)
    {
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);
    }
 
  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */
 
  if (undobuf.other_insn)
    {
      rtx note, next;
 
      PATTERN (undobuf.other_insn) = other_pat;
 
      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);
 
          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            remove_note (undobuf.other_insn, note);
        }
 
      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }
 
  if (swap_i2i3)
    {
      rtx insn;
      rtx link;
      rtx ni2dest;
 
      /* I3 now uses what used to be its destination and which is now
         I2's destination.  This requires us to do a few adjustments.  */
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);
 
      /* We need a LOG_LINK from I3 to I2.  But we used to have one,
         so we still will.
 
         However, some later insn might be using I2's dest and have
         a LOG_LINK pointing at I3.  We must remove this link.
         The simplest way to remove the link is to point it at I1,
         which we know will be a NOTE.  */
 
      /* newi2pat is usually a SET here; however, recog_for_combine might
         have added some clobbers.  */
      if (GET_CODE (newi2pat) == PARALLEL)
        ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
      else
        ni2dest = SET_DEST (newi2pat);
 
      for (insn = NEXT_INSN (i3);
           insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
            {
              for (link = LOG_LINKS (insn); link;
                   link = XEXP (link, 1))
                if (XEXP (link, 0) == i3)
                  XEXP (link, 0) = i1;
 
              break;
            }
        }
    }
 
  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    unsigned int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   || !i2dest_killed
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   || !i1dest_killed
                   ? 0 : i1dest);
 
    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
 
    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */
 
    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));
 
    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));
 
    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
 
    if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
      {
        rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
 
        reset_used_flags (call_usage);
        call_usage = copy_rtx (call_usage);
 
        if (substed_i2)
          replace_rtx (call_usage, i2dest, i2src);
 
        if (substed_i1)
          replace_rtx (call_usage, i1dest, i1src);
 
        CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
      }
 
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;
 
    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.
 
       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */
 
    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
               || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
              && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;
 
        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }
 
    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;
 
    if (newi2pat)
      {
        if (MAY_HAVE_DEBUG_INSNS && i2scratch)
          propagate_for_debug (i2, i3, i2dest, i2src, false);
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      {
        if (MAY_HAVE_DEBUG_INSNS && i2src)
          propagate_for_debug (i2, i3, i2dest, i2src, i3_subst_into_i2);
        SET_INSN_DELETED (i2);
      }
 
    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        if (MAY_HAVE_DEBUG_INSNS)
          propagate_for_debug (i1, i3, i1dest, i1src, false);
        SET_INSN_DELETED (i1);
      }
 
    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */
 
    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, DF_INSN_LUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, DF_INSN_LUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? DF_INSN_LUID (i1) : DF_INSN_LUID (i2),
                   i3, &midnotes);
 
    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
 
    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  */
 
    if (newi2pat && new_i2_notes)
      distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
 
    if (new_i3_notes)
      distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
 
    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */
 
    if (i3dest_killed)
      {
        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
                                            NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
                                            NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }
 
    if (i2dest_in_i2src)
      {
        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }
 
    if (i1dest_in_i1src)
      {
        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }
 
    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);
 
    if (REG_P (i2dest))
      {
        rtx link;
        rtx i2_insn = 0, i2_val = 0, set;
 
        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST. If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */
 
        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
 
        record_value_for_reg (i2dest, i2_insn, i2_val);
 
        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            INC_REG_N_SETS (regno, -1);
          }
      }
 
    if (i1 && REG_P (i1dest))
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;
 
        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
 
        record_value_for_reg (i1dest, i1_insn, i1_val);
 
        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          INC_REG_N_SETS (regno, -1);
      }
 
    /* Update reg_stat[].nonzero_bits et al for any changes that may have
       been made to this insn.  The order of the calls to
       set_nonzero_bits_and_sign_copies () is important, because newi2pat
       can affect the nonzero_bits of newpat.  */
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
  }
 
  if (undobuf.other_insn != NULL_RTX)
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying other_insn ");
          dump_insn_slim (dump_file, undobuf.other_insn);
        }
      df_insn_rescan (undobuf.other_insn);
    }
 
  if (i1 && !(NOTE_P(i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i1 ");
          dump_insn_slim (dump_file, i1);
        }
      df_insn_rescan (i1);
    }
 
  if (i2 && !(NOTE_P(i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i2 ");
          dump_insn_slim (dump_file, i2);
        }
      df_insn_rescan (i2);
    }
 
  if (i3 && !(NOTE_P(i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i3 ");
          dump_insn_slim (dump_file, i3);
        }
      df_insn_rescan (i3);
    }
 
  /* Set new_direct_jump_p if a new return or simple jump instruction
     has been created.  Adjust the CFG accordingly.  */
 
  if (returnjump_p (i3) || any_uncondjump_p (i3))
    {
      *new_direct_jump_p = 1;
      mark_jump_label (PATTERN (i3), i3, 0);
      update_cfg_for_uncondjump (i3);
    }
 
  if (undobuf.other_insn != NULL_RTX
      && (returnjump_p (undobuf.other_insn)
          || any_uncondjump_p (undobuf.other_insn)))
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (undobuf.other_insn);
    }
 
  /* A noop might also need cleaning up of CFG, if it comes from the
     simplification of a jump.  */
  if (GET_CODE (newpat) == SET
      && SET_SRC (newpat) == pc_rtx
      && SET_DEST (newpat) == pc_rtx)
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (i3);
    }
 
  combine_successes++;
  undo_commit ();
 
  if (added_links_insn
      && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
      && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
    return added_links_insn;
  else
4087
    return newi2pat ? i2 : i3;
4088
}

/* Undo all the modifications recorded in undobuf.  */

static void
undo_all (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      switch (undo->kind)
        {
        case UNDO_RTX:
          *undo->where.r = undo->old_contents.r;
          break;
        case UNDO_INT:
          *undo->where.i = undo->old_contents.i;
          break;
        case UNDO_MODE:
          adjust_reg_mode (*undo->where.r, undo->old_contents.m);
          break;
        default:
          gcc_unreachable ();
        }

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  undobuf.undos = 0;
}

/* We've committed to accepting the changes we made.  Move all
   of the undos to the free list.  */

static void
undo_commit (void)
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }
  undobuf.undos = 0;
}
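
/* Illustrative sketch (not part of the original file): callers record
   tentative changes with SUBST and then resolve them through one of the
   two routines above, e.g.

       SUBST (SET_SRC (pat), new_src);
       if (recog_for_combine (&pat, i3, &new_notes) < 0)
         undo_all ();            /* reject: roll back every SUBST */
       else
         undo_commit ();         /* accept: recycle the undo records */

   The validity test shown (recog_for_combine) is just one typical
   caller; the sketch only illustrates the accept/reject shape.  */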

/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (rtx *loc, rtx insn)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  unsigned HOST_WIDE_INT len = 0;
  HOST_WIDE_INT pos = 0;
  int unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (MEM_P (SUBREG_REG (x)))
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          enum machine_mode address_mode
            = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));

          SUBST (XEXP (x, 0),
                 gen_rtx_LO_SUM (address_mode,
                                 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
                                 XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif
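
      /* For instance, on a target with HAVE_lo_sum the address inside
         (mem (symbol_ref "x")) would be rewritten just above as

             (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))

         and the returned split point is the inner (high ...) expression,
         so it can be computed by a separate insn.  (Illustrative example,
         not from the original source.)  */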

      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                            MEM_ADDR_SPACE (x)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
                                                      XEXP (x, 0)),
                                         subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq
              && NEXT_INSN (seq) != NULL_RTX
              && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
              && NONJUMP_INSN_P (seq)
              && GET_CODE (PATTERN (seq)) == SET
              && SET_DEST (PATTERN (seq)) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (seq)))
              && NONJUMP_INSN_P (NEXT_INSN (seq))
              && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
              && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
              && memory_address_addr_space_p
                   (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
                    MEM_ADDR_SPACE (x)))
            {
              rtx src1 = SET_SRC (PATTERN (seq));
              rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
            return &XEXP (XEXP (x, 0), 0);
        }

      /* If we have a PLUS whose first operand is complex, try computing it
         separately by making a split there.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                            MEM_ADDR_SPACE (x))
          && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
          && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
        return &XEXP (XEXP (x, 0), 0);
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && !OBJECT_P (SET_SRC (x))
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && CONST_INT_P (XEXP (SET_DEST (x), 1))
          && CONST_INT_P (XEXP (SET_DEST (x), 2))
          && CONST_INT_P (SET_SRC (x))
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
          rtx or_mask;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          or_mask = gen_int_mode (src << pos, mode);
          if (src == mask)
            SUBST (SET_SRC (x),
                   simplify_gen_binary (IOR, mode, dest, or_mask));
          else
            {
              rtx negmask = gen_int_mode (~(mask << pos), mode);
              SUBST (SET_SRC (x),
                     simplify_gen_binary (IOR, mode,
                                          simplify_gen_binary (AND, mode,
                                                               dest, negmask),
                                          or_mask));
            }

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }
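
      /* Worked example (illustrative, not from the original source):
         storing the constant 1 into a 2-bit field at bit 4, i.e.
         (set (zero_extract DEST 2 4) (const_int 1)), has len = 2, pos = 4,
         mask = 3 and src = 1, and is rewritten as

             (set DEST (ior (and DEST (const_int -49)) (const_int 16)))

         since ~(3 << 4) = -49 and 1 << 4 = 16.  When src == mask, the AND
         is not needed and only the IOR survives.  */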

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or nonzero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (CONST_INT_P (XEXP (SET_SRC (x), 1))
              && REG_P (XEXP (SET_SRC (x), 0))
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && REG_P (SET_DEST (x))
              && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
          break;

        case NE:
          /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one
             bit of X is known to be on, this can be converted into a NEG
             of a shift.  */
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_NEG (mode,
                                  gen_rtx_LSHIFTRT (mode,
                                                    XEXP (SET_SRC (x), 0),
                                                    GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          break;
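
          /* Illustrative example: with STORE_FLAG_VALUE == -1, if only
             bit 3 of X can be nonzero, (ne X (const_int 0)) is rewritten
             above as

                 (neg (lshiftrt X (const_int 3)))

             so the 0-or-8 value becomes 0 or -1 without a comparison.  */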

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (CONST_INT_P (XEXP (SET_SRC (x), 1))
              && CONST_INT_P (XEXP (SET_SRC (x), 2)))
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_AND (mode,
                                  gen_rtx_LSHIFTRT
                                  (mode, gen_lowpart (mode, inner),
                                   GEN_INT (pos)),
                                  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_fmt_ee
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_ASHIFT (mode,
                                      gen_lowpart (mode, inner),
                                      GEN_INT (GET_MODE_BITSIZE (mode)
                                               - len - pos)),
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }
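
      /* Illustrative example: in SImode, (zero_extract X 12 4) has
         len = 12, too wide for the assumed 8-bit AND constant, so it is
         rewritten with two shifts:

             (lshiftrt (ashift X (const_int 16)) (const_int 20))

         since 32 - 12 - 4 = 16 and 32 - 12 = 20.  A narrower field such
         as (zero_extract X 8 4) would use the LSHIFTRT-and-AND form.  */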

      /* See if this is a simple operation with a constant as the second
         operand.  It might be that this constant is out of range and hence
         could be used as a split point.  */
      if (BINARY_P (SET_SRC (x))
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
          && (OBJECT_P (XEXP (SET_SRC (x), 0))
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
        return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
         not in a register.  The operation might require this operand in a
         register, so return it as a split point.  We can always do this
         because if the first operand were another operation, we would have
         already found it as a split point.  */
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
        return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
         it is better to write this as (not (ior A B)) so we can split it.
         Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
        {
          SUBST (*loc,
                 gen_rtx_NOT (GET_MODE (x),
                              gen_rtx_fmt_ee (code == IOR ? AND : IOR,
                                              GET_MODE (x),
                                              XEXP (XEXP (x, 0), 0),
                                              XEXP (XEXP (x, 1), 0))));
          return find_split_point (loc, insn);
        }
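
      /* E.g. the NOR form (and (not A) (not B)) becomes (not (ior A B))
         here, and (ior (not A) (not B)) becomes (not (and A B)); the
         split search is then retried on the rewritten expression.  */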

      /* Many RISC machines have a large set of logical insns.  If the
         second operand is a NOT, put it first so we will try to split the
         other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
        {
          rtx tem = XEXP (x, 0);
          SUBST (XEXP (x, 0), XEXP (x, 1));
          SUBST (XEXP (x, 1), tem);
        }
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_BITFIELD_OPS:              /* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case RTX_TERNARY:
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
        return split;
      /* ... fall through ...  */
    case RTX_UNARY:
      /* Some machines have (and (shift ...) ...) insns.  If X is not
         an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
        return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
        return split;
      return loc;

    default:
      /* Otherwise, we don't have a split point.  */
      return 0;
    }
}

/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too large to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that,
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is nonzero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is nonzero.  */

static rtx
subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  const char *fmt;
  int len, i;
  rtx new_rtx;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)                        \
  ((X) == (Y)                                           \
   || (REG_P (X) && REG_P (Y)   \
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they
     will not have been seen as equal above.  However, the log links code
     will make a LOG_LINKS entry for that case.  If we do nothing, we
     will try to rerecognize our original insn and, when it succeeds,
     we will delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && REG_P (from)
      && reg_overlap_mentioned_p (x, from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && OBJECT_P (x))
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new_rtx) == CLOBBER
          && XEXP (new_rtx, 0) == const0_rtx)
        return new_rtx;

      SUBST (XVECEXP (x, 0, 0), new_rtx);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
        {
          rtx dest = SET_DEST (XVECEXP (x, 0, i));

          if (!REG_P (dest)
              && GET_CODE (dest) != CC0
              && GET_CODE (dest) != PC)
            {
              new_rtx = subst (dest, from, to, 0, unique_copy);

              /* If this substitution failed, this whole thing fails.  */
              if (GET_CODE (new_rtx) == CLOBBER
                  && XEXP (new_rtx, 0) == const0_rtx)
                return new_rtx;

              SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
            }
        }
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
         or PC, so set up to skip this common case.  All other cases
         where we want to suppress replacing something inside a
         SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
          && (REG_P (SET_DEST (x))
              || GET_CODE (SET_DEST (x)) == CC0
              || GET_CODE (SET_DEST (x)) == PC))
        fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
         constant.  */
      if (fmt[0] == 'e')
        op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
        {
          if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                {
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
                    {
                      new_rtx = (unique_copy && n_occurrences
                             ? copy_rtx (to) : to);
                      n_occurrences++;
                    }
                  else
                    {
                      new_rtx = subst (XVECEXP (x, i, j), from, to, 0,
                                   unique_copy);

                      /* If this substitution failed, this whole thing
                         fails.  */
                      if (GET_CODE (new_rtx) == CLOBBER
                          && XEXP (new_rtx, 0) == const0_rtx)
                        return new_rtx;
                    }

                  SUBST (XVECEXP (x, i, j), new_rtx);
                }
            }
          else if (fmt[i] == 'e')
            {
              /* If this is a register being set, ignore it.  */
              new_rtx = XEXP (x, i);
              if (in_dest
                  && i == 0
                  && (((code == SUBREG || code == ZERO_EXTRACT)
                       && REG_P (new_rtx))
                      || code == STRICT_LOW_PART))
                ;

              else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
                {
                  /* In general, don't install a subreg involving two
                     modes not tieable.  It can worsen register
                     allocation, and can even make invalid reload
                     insns, since the reg inside may need to be copied
                     from in the outside mode, and that may be invalid
                     if it is an fp reg copied in integer mode.

                     We allow two exceptions to this: It is valid if
                     it is inside another SUBREG and the mode of that
                     SUBREG and the mode of the inside of TO is
                     tieable and it is valid if X is a SET that copies
                     FROM to CC0.  */

                  if (GET_CODE (to) == SUBREG
                      && ! MODES_TIEABLE_P (GET_MODE (to),
                                            GET_MODE (SUBREG_REG (to)))
                      && ! (code == SUBREG
                            && MODES_TIEABLE_P (GET_MODE (x),
                                                GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
                      )
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

#ifdef CANNOT_CHANGE_MODE_CLASS
                  if (code == SUBREG
                      && REG_P (to)
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
                      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
                                                   GET_MODE (to),
                                                   GET_MODE (x)))
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
#endif

                  new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
                  n_occurrences++;
                }
              else
                /* If we are in a SET_DEST, suppress most cases unless we
                   have gone inside a MEM, in which case we want to
                   simplify the address.  We assume here that things that
                   are actually part of the destination have their inner
                   parts in the first expression.  This is true for SUBREG,
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
                   things aside from REG and MEM that should appear in a
                   SET_DEST.  */
                new_rtx = subst (XEXP (x, i), from, to,
                             (((in_dest
                                && (code == SUBREG || code == STRICT_LOW_PART
                                    || code == ZERO_EXTRACT))
                               || code == SET)
                              && i == 0), unique_copy);

              /* If we found that we will have to reject this combination,
                 indicate that by returning the CLOBBER ourselves, rather than
                 an expression containing it.  This will speed things up as
                 well as prevent accidents where two CLOBBERs are considered
                 to be equal, thus producing an incorrect simplification.  */

              if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
                return new_rtx;

              if (GET_CODE (x) == SUBREG
                  && (CONST_INT_P (new_rtx)
                      || GET_CODE (new_rtx) == CONST_DOUBLE))
                {
                  enum machine_mode mode = GET_MODE (x);

                  x = simplify_subreg (GET_MODE (x), new_rtx,
                                       GET_MODE (SUBREG_REG (x)),
                                       SUBREG_BYTE (x));
                  if (! x)
                    x = gen_rtx_CLOBBER (mode, const0_rtx);
                }
              else if (CONST_INT_P (new_rtx)
                       && GET_CODE (x) == ZERO_EXTEND)
                {
                  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                                new_rtx, GET_MODE (XEXP (x, 0)));
                  gcc_assert (x);
                }
              else
                SUBST (XEXP (x, i), new_rtx);
            }
        }
    }

  /* Check if we are loading something from the constant pool via float
     extension; in this case we would undo the compress_float_constant
     optimization and degenerate the constant load to an immediate value.  */
  if (GET_CODE (x) == FLOAT_EXTEND
      && MEM_P (XEXP (x, 0))
      && MEM_READONLY_P (XEXP (x, 0)))
    {
      rtx tmp = avoid_constant_pool_reference (x);
      if (x != tmp)
        return x;
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
         with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
        x = combine_simplify_rtx (x, op0_mode, in_dest);

      if (GET_CODE (x) == code)
        break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
         have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}

/* Simplify X, a piece of RTL.  We just operate on the expression at the
   outer level; call `subst' to simplify recursively.  Return the new
   expression.

   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
   if we are inside a SET_DEST.  */

static rtx
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx temp;
  int i;

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Check for cases where both arms are testing the same
     condition.

     Don't do anything if all operands are very simple.  */

  if ((BINARY_P (x)
       && ((!OBJECT_P (XEXP (x, 0))
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
           || (!OBJECT_P (XEXP (x, 1))
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
      || (UNARY_P (x)
          && (!OBJECT_P (XEXP (x, 0))
               && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
    {
      rtx cond, true_rtx, false_rtx;

      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
      if (cond != 0
          /* If everything is a comparison, what we have is highly unlikely
             to be simpler, so don't use it.  */
          && ! (COMPARISON_P (x)
                && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
        {
          rtx cop1 = const0_rtx;
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);

          if (cond_code == NE && COMPARISON_P (cond))
            return x;

          /* Simplify the alternative arms; this may collapse the true and
             false arms to store-flag values.  Be careful to use copy_rtx
             here since true_rtx or false_rtx might share RTL with x as a
             result of the if_then_else_cond call above.  */
          true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
          false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);

          /* If true_rtx and false_rtx are not general_operands, an if_then_else
             is unlikely to be simpler.  */
          if (general_operand (true_rtx, VOIDmode)
              && general_operand (false_rtx, VOIDmode))
            {
              enum rtx_code reversed;

              /* Restarting if we generate a store-flag expression will cause
                 us to loop.  Just drop through in this case.  */

              /* If the result values are STORE_FLAG_VALUE and zero, we can
                 just make the comparison operation.  */
              if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
                x = simplify_gen_relational (cond_code, mode, VOIDmode,
                                             cond, cop1);
              else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_relational (reversed, mode, VOIDmode,
                                             cond, cop1);

              /* Likewise, we can make the negate of a comparison operation
                 if the result values are - STORE_FLAG_VALUE and zero.  */
              else if (CONST_INT_P (true_rtx)
                       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
                       && false_rtx == const0_rtx)
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (cond_code,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else if (CONST_INT_P (false_rtx)
                       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
                       && true_rtx == const0_rtx
                       && ((reversed = reversed_comparison_code_parts
                                        (cond_code, cond, cop1, NULL))
                           != UNKNOWN))
                x = simplify_gen_unary (NEG, mode,
                                        simplify_gen_relational (reversed,
                                                                 mode, VOIDmode,
                                                                 cond, cop1),
                                        mode);
              else
                return gen_rtx_IF_THEN_ELSE (mode,
                                             simplify_gen_relational (cond_code,
                                                                      mode,
                                                                      VOIDmode,
                                                                      cond,
                                                                      cop1),
                                             true_rtx, false_rtx);

              code = GET_CODE (x);
              op0_mode = VOIDmode;
            }
        }
    }

  /* Try to fold this expression in case we have constants that weren't
     present before.  */
  temp = 0;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (XEXP (x, 0));
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
        if (cmp_mode == VOIDmode)
          {
            cmp_mode = GET_MODE (XEXP (x, 1));
            if (cmp_mode == VOIDmode)
              cmp_mode = op0_mode;
          }
        temp = simplify_relational_operation (code, mode, cmp_mode,
                                              XEXP (x, 0), XEXP (x, 1));
      }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
      break;
    case RTX_BITFIELD_OPS:
    case RTX_TERNARY:
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
                                         XEXP (x, 1), XEXP (x, 2));
      break;
    default:
      break;
    }

  if (temp)
    {
      x = temp;
      code = GET_CODE (temp);
      op0_mode = VOIDmode;
      mode = GET_MODE (temp);
    }

  /* First see if we can apply the inverse distributive law.  */
  if (code == PLUS || code == MINUS
      || code == AND || code == IOR || code == XOR)
    {
      x = apply_distributive_law (x);
      code = GET_CODE (x);
      op0_mode = VOIDmode;
    }

  /* If CODE is an associative operation not otherwise handled, see if we
     can associate some operands.  This can win if they are constants or
     if they are logically related (i.e. (a & b) & a).  */
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
       || code == AND || code == IOR || code == XOR
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
          || (flag_associative_math && FLOAT_MODE_P (mode))))
    {
      if (GET_CODE (XEXP (x, 0)) == code)
        {
          rtx other = XEXP (XEXP (x, 0), 0);
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
          rtx inner_op1 = XEXP (x, 1);
          rtx inner;

          /* Make sure we pass the constant operand if any as the second
             one if this is a commutative operation.  */
          if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
            {
              rtx tem = inner_op0;
              inner_op0 = inner_op1;
              inner_op1 = tem;
            }
          inner = simplify_binary_operation (code == MINUS ? PLUS
                                             : code == DIV ? MULT
                                             : code,
                                             mode, inner_op0, inner_op1);

          /* For commutative operations, try the other pair if that one
             didn't simplify.  */
          if (inner == 0 && COMMUTATIVE_ARITH_P (x))
            {
              other = XEXP (XEXP (x, 0), 1);
              inner = simplify_binary_operation (code, mode,
                                                 XEXP (XEXP (x, 0), 0),
                                                 XEXP (x, 1));
            }

          if (inner)
            return simplify_gen_binary (code, mode, other, inner);
        }
    }
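
  /* Illustrative example: for (and (and A B) A) the first attempt pairs
     B with A, which does not fold; the commutative retry then simplifies
     (and A A) to A and the expression is rebuilt as (and B A), dropping
     the redundant operand.  */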

  /* A little bit of algebraic simplification here.  */
  switch (code)
    {
    case MEM:
      /* Ensure that our address has any ASHIFTs converted to MULT in case
         address-recognizing predicates are called later.  */
      temp = make_compound_operation (XEXP (x, 0), MEM);
      SUBST (XEXP (x, 0), temp);
      break;

    case SUBREG:
      if (op0_mode == VOIDmode)
        op0_mode = GET_MODE (SUBREG_REG (x));

      /* See if this can be moved to simplify_subreg.  */
      if (CONSTANT_P (SUBREG_REG (x))
          && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
             /* Don't call gen_lowpart if the inner mode
                is VOIDmode and we cannot simplify it, as SUBREG without
                inner mode is invalid.  */
          && (GET_MODE (SUBREG_REG (x)) != VOIDmode
              || gen_lowpart_common (mode, SUBREG_REG (x))))
        return gen_lowpart (mode, SUBREG_REG (x));

      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
        break;
      {
        rtx temp;
        temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
                                SUBREG_BYTE (x));
        if (temp)
          return temp;
      }

      /* Don't change the mode of the MEM if that would change the meaning
         of the address.  */
      if (MEM_P (SUBREG_REG (x))
          && (MEM_VOLATILE_P (SUBREG_REG (x))
              || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
        return gen_rtx_CLOBBER (mode, const0_rtx);

      /* Note that we cannot do any narrowing for non-constants since
         we might have been counting on using the fact that some bits were
         zero.  We now do this in the SET.  */

      break;

    case NEG:
      temp = expand_compound_operation (XEXP (x, 0));

      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
         replaced by (lshiftrt X C).  This will convert
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */

      if (GET_CODE (temp) == ASHIFTRT
          && CONST_INT_P (XEXP (temp, 1))
          && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
                                     INTVAL (XEXP (temp, 1)));

      /* If X has only a single bit that might be nonzero, say, bit I, convert
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
         or a SUBREG of one since we'd be making the expression more
         complex if it was just a register.  */

      if (!REG_P (temp)
          && ! (GET_CODE (temp) == SUBREG
                && REG_P (SUBREG_REG (temp)))
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
        {
          rtx temp1 = simplify_shift_const
            (NULL_RTX, ASHIFTRT, mode,
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
                                   GET_MODE_BITSIZE (mode) - 1 - i),
             GET_MODE_BITSIZE (mode) - 1 - i);

          /* If all we did was surround TEMP with the two shifts, we
             haven't improved anything, so don't use it.  Otherwise,
             we are better off with TEMP1.  */
          if (GET_CODE (temp1) != ASHIFTRT
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
              || XEXP (XEXP (temp1, 0), 0) != temp)
            return temp1;
        }
      break;
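
      /* Illustrative example: in SImode, if TEMP can only have bit 2 set
         (value 0 or 4), then i = 2 and (neg TEMP) becomes

             (ashiftrt (ashift TEMP (const_int 29)) (const_int 29))

         which also evaluates to 0 or -4, matching the sign_extract of
         the single bit.  */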

    case TRUNCATE:
      /* We can't handle truncation to a partial integer mode here
         because we don't know the real bitsize of the partial
         integer mode.  */
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
        break;

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        SUBST (XEXP (x, 0),
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
                              GET_MODE_MASK (mode), 0));

      /* We can truncate a constant value and return it.  */
      if (CONST_INT_P (XEXP (x, 0)))
        return gen_int_mode (INTVAL (XEXP (x, 0)), mode);

      /* Similarly to what we do in simplify-rtx.c, a truncate of a register
         whose value is a comparison can be replaced with a subreg if
         STORE_FLAG_VALUE permits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
          && (temp = get_last_value (XEXP (x, 0)))
          && COMPARISON_P (temp))
        return gen_lowpart (mode, XEXP (x, 0));
      break;

    case CONST:
      /* (const (const X)) can become (const X).  Do it this way rather than
         returning the inner CONST since CONST can be shared with a
         REG_EQUAL note.  */
      if (GET_CODE (XEXP (x, 0)) == CONST)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

#ifdef HAVE_lo_sum
    case LO_SUM:
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
         can add in an offset.  find_split_point will split this address up
         again if it doesn't match.  */
      if (GET_CODE (XEXP (x, 0)) == HIGH
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
        return XEXP (x, 1);
      break;
#endif

    case PLUS:
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
         bit-field and can be replaced by either a sign_extend or a
         sign_extract.  The `and' may be a zero_extend and the two
         <c>, -<c> constants may be reversed.  */
      if (GET_CODE (XEXP (x, 0)) == XOR
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
          && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
              || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
               && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
               && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
                   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
                  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
                      == (unsigned int) i + 1))))
        return simplify_shift_const
          (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
                                 GET_MODE_BITSIZE (mode) - (i + 1)),
           GET_MODE_BITSIZE (mode) - (i + 1));
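
      /* Worked example (illustrative only): for a 3-bit field, pow2 = 8
         and c = 4, so in SImode

             (plus (xor (and X (const_int 7)) (const_int 4)) (const_int -4))

         has i = exact_log2 (4) = 2, the AND mask equals (1 << 3) - 1, and
         the expression is rewritten as

             (ashiftrt (ashift X (const_int 29)) (const_int 29))

         which sign-extends the low 3 bits of X -- the familiar
         ((x & 7) ^ 4) - 4 idiom.  */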

      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
         the bitsize of the mode - 1.  This allows simplification of
         "a = (b & 8) == 0;"  */
      if (XEXP (x, 1) == constm1_rtx
          && !REG_P (XEXP (x, 0))
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
                && REG_P (SUBREG_REG (XEXP (x, 0))))
          && nonzero_bits (XEXP (x, 0), mode) == 1)
        return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
                                 GET_MODE_BITSIZE (mode) - 1),
           GET_MODE_BITSIZE (mode) - 1);

      /* If we are adding two things that have no bits in common, convert
         the addition into an IOR.  This will often be further simplified,
         for example in cases like ((a & 1) + (a & 2)), which can
         become a & 3.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode)
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
        {
          /* Try to simplify the expression further.  */
          rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
          temp = combine_simplify_rtx (tor, mode, in_dest);

          /* If we could, great.  If not, do not go ahead with the IOR
             replacement, since PLUS appears in many special purpose
             address arithmetic instructions.  */
          if (GET_CODE (temp) != CLOBBER && temp != tor)
            return temp;
        }
      break;

    case MINUS:
      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
         (and <foo> (const_int pow2-1))  */
      if (GET_CODE (XEXP (x, 1)) == AND
          && CONST_INT_P (XEXP (XEXP (x, 1), 1))
          && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
      break;

    case MULT:
      /* If we have (mult (plus A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  This
         occurs mostly in addresses, often when unrolling loops.  */

      if (GET_CODE (XEXP (x, 0)) == PLUS)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      /* Try to simplify a*(b/c) as (a*b)/c.  */
      if (FLOAT_MODE_P (mode) && flag_associative_math
          && GET_CODE (XEXP (x, 0)) == DIV)
        {
          rtx tem = simplify_binary_operation (MULT, mode,
                                               XEXP (XEXP (x, 0), 0),
                                               XEXP (x, 1));
          if (tem)
            return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
        }
      break;

    case UDIV:
      /* If this is a divide by a power of two, treat it as a shift if
         its first operand is a shift.  */
      if (CONST_INT_P (XEXP (x, 1))
          && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
              || GET_CODE (XEXP (x, 0)) == ROTATE
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
      break;
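
      /* Illustrative example: (udiv (lshiftrt X (const_int 2)) (const_int 4))
         has i = 2 and is handed to simplify_shift_const, which can merge
         the two logical shifts into (lshiftrt X (const_int 4)).  */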

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
          || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
              && ! CC0_P (XEXP (x, 0))))
        {
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          enum rtx_code new_code;

          if (GET_CODE (op0) == COMPARE)
            op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

          /* Simplify our comparison, if possible.  */
          new_code = simplify_comparison (code, &op0, &op1);

          /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
             if only the low-order bit is possibly nonzero in X (such as when
             X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
             (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
             known to be either 0 or -1, NE becomes a NEG and EQ becomes
             (plus X 1).

             Remove any ZERO_EXTRACT we made when thinking this was a
             comparison.  It may now be simpler to use, e.g., an AND.  If a
             ZERO_EXTRACT is indeed appropriate, it will be placed back by
             the call to make_compound_operation in the SET case.  */

          if (STORE_FLAG_VALUE == 1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && nonzero_bits (op0, mode) == 1)
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == 1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }
5372
 
5373
          else if (STORE_FLAG_VALUE == 1
5374
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5375
                   && op1 == const0_rtx
5376
                   && mode == GET_MODE (op0)
5377
                   && nonzero_bits (op0, mode) == 1)
5378
            {
5379
              op0 = expand_compound_operation (op0);
5380
              return simplify_gen_binary (XOR, mode,
5381
                                          gen_lowpart (mode, op0),
5382
                                          const1_rtx);
5383
            }
5384
 
5385
          else if (STORE_FLAG_VALUE == 1
5386
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5387
                   && op1 == const0_rtx
5388
                   && mode == GET_MODE (op0)
5389
                   && (num_sign_bit_copies (op0, mode)
5390
                       == GET_MODE_BITSIZE (mode)))
5391
            {
5392
              op0 = expand_compound_operation (op0);
5393
              return plus_constant (gen_lowpart (mode, op0), 1);
5394
            }
5395
 
5396
          /* If STORE_FLAG_VALUE is -1, we have cases similar to
5397
             those above.  */
5398
          if (STORE_FLAG_VALUE == -1
5399
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5400
              && op1 == const0_rtx
5401
              && (num_sign_bit_copies (op0, mode)
5402
                  == GET_MODE_BITSIZE (mode)))
5403
            return gen_lowpart (mode,
5404
                                expand_compound_operation (op0));
5405
 
5406
          else if (STORE_FLAG_VALUE == -1
5407
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5408
                   && op1 == const0_rtx
5409
                   && mode == GET_MODE (op0)
5410
                   && nonzero_bits (op0, mode) == 1)
5411
            {
5412
              op0 = expand_compound_operation (op0);
5413
              return simplify_gen_unary (NEG, mode,
5414
                                         gen_lowpart (mode, op0),
5415
                                         mode);
5416
            }
5417
 
5418
          else if (STORE_FLAG_VALUE == -1
5419
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5420
                   && op1 == const0_rtx
5421
                   && mode == GET_MODE (op0)
5422
                   && (num_sign_bit_copies (op0, mode)
5423
                       == GET_MODE_BITSIZE (mode)))
5424
            {
5425
              op0 = expand_compound_operation (op0);
5426
              return simplify_gen_unary (NOT, mode,
5427
                                         gen_lowpart (mode, op0),
5428
                                         mode);
5429
            }
5430
 
5431
          /* If X is 0/1, (eq X 0) is X-1.  */
5432
          else if (STORE_FLAG_VALUE == -1
5433
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5434
                   && op1 == const0_rtx
5435
                   && mode == GET_MODE (op0)
5436
                   && nonzero_bits (op0, mode) == 1)
5437
            {
5438
              op0 = expand_compound_operation (op0);
5439
              return plus_constant (gen_lowpart (mode, op0), -1);
5440
            }
5441
 
5442
          /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
5443
             one bit that might be nonzero, we can convert (ne x 0) to
5444
             (ashift x c) where C puts the bit in the sign bit.  Remove any
5445
             AND with STORE_FLAG_VALUE when we are done, since we are only
5446
             going to test the sign bit.  */
5447
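          /* E.g., in SImode with STORE_FLAG_VALUE == 0x80000000, if
             only bit 3 of X can be nonzero, (ne X 0) becomes
             (ashift X 28), putting that bit into the sign position.  */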
          if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
            {
              x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                        expand_compound_operation (op0),
                                        GET_MODE_BITSIZE (mode) - 1 - i);
              if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
                return XEXP (x, 0);
              else
                return x;
            }

          /* If the code changed, return a whole new comparison.  */
          if (new_code != code)
            return gen_rtx_fmt_ee (new_code, mode, op0, op1);

          /* Otherwise, keep this operation, but maybe change its operands.
             This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
          SUBST (XEXP (x, 0), op0);
          SUBST (XEXP (x, 1), op1);
        }
      break;

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
        return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
      return simplify_logical (x);

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (CONST_INT_P (XEXP (x, 1)))
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)));

      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
        SUBST (XEXP (x, 1),
               force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
                              ((HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
                              0));
      break;

    default:
      break;
    }

  return x;
}

/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = COMPARISON_P (cond);
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;

  /* Simplify storing of the truth value.  */
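  /* E.g., (if_then_else (lt A B) const_true_rtx (const_int 0)) becomes
     the bare relational (lt A B) in MODE.  */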
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return simplify_gen_relational (true_code, mode, VOIDmode,
                                    XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode)))
    return reversed;

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = reversed_comparison_code (cond, NULL))
          != UNKNOWN)
      && REG_P (XEXP (cond, 0)))
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
        {
          swapped = 1, true_code = EQ, false_code = NE;
          temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
        }

      /* If we are comparing against zero and the expression being tested has
         only a single bit that might be nonzero, that is its value when it is
         not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
        {
          false_code = EQ;
          false_val = GEN_INT (trunc_int_for_mode (nzb, GET_MODE (from)));
        }
      else if (true_code == EQ && true_val == const0_rtx
               && (num_sign_bit_copies (from, GET_MODE (from))
                   == GET_MODE_BITSIZE (GET_MODE (from))))
        {
          false_code = EQ;
          false_val = constm1_rtx;
        }

      /* Now simplify an arm if we know the value of the register in the
         branch and it is used in the arm.  Be careful due to the potential
         of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
        true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
                                      from, true_val),
                      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
        false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
                                   from, false_val),
                       pc_rtx, pc_rtx, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && reversed_comparison_code (cond, NULL) != UNKNOWN
      && (true_rtx == pc_rtx
          || (CONSTANT_P (true_rtx)
              && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
          || true_rtx == const0_rtx
          || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
          || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
              && !OBJECT_P (false_rtx))
          || reg_mentioned_p (true_rtx, false_rtx)
          || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = COMPARISON_P (cond);
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && !HONOR_NANS (mode)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
           && !HONOR_NANS (mode)
           && rtx_equal_p (XEXP (cond, 0), true_rtx)
           && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */
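  /* E.g., (if_then_else (ge X (const_int 0)) X (neg X)) becomes (abs X),
     and with LT or LE in the condition it becomes (neg (abs X)).  */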
 
  if (GET_MODE_CLASS (mode) == MODE_INT
      && comparison_p
      && XEXP (cond, 1) == const0_rtx
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
        return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
        return
          simplify_gen_unary (NEG, mode,
                              simplify_gen_unary (ABS, mode, true_rtx, mode),
                              mode);
      default:
        break;
      }

  /* Look for MIN or MAX.  */

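  /* E.g., (if_then_else (ge A B) A B) is (smax A B), and
     (if_then_else (ltu A B) A B) is (umin A B).  */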
  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
        return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
      case LE:
      case LT:
        return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
      case GEU:
      case GTU:
        return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
      case LEU:
      case LTU:
        return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
      default:
        break;
      }

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */
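  /* E.g., with STORE_FLAG_VALUE == 1,
     (if_then_else COND (plus Z (const_int 4)) Z) can be rewritten as
     (plus Z (mult COND (const_int 4))), since the multiply is 4 when
     COND holds and 0 when it does not.  */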
 
  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p
      && GET_MODE_CLASS (mode) == MODE_INT
      && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

      if (z)
        {
          temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
                                                 cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (MULT, m, temp,
                                      simplify_gen_binary (MULT, m, c1,
                                                           const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

          if (extend_op != UNKNOWN)
            temp = simplify_gen_unary (extend_op, mode, temp, m);

          return temp;
        }
    }
 
  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
           && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
          || ((num_sign_bit_copies (XEXP (cond, 0), mode)
               == GET_MODE_BITSIZE (mode))
              && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
                            gen_lowpart (mode, XEXP (cond, 0)), i);

  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG when nonzero_bits (REG) == 8.  */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
      && GET_MODE (XEXP (cond, 0)) == mode
      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
          == nonzero_bits (XEXP (cond, 0), mode)
      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
    return XEXP (cond, 0);

  return x;
}

/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (rtx x)
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_MODE_CLASS (mode) == MODE_CC
       || GET_CODE (src) == COMPARE
       || CC0_P (dest))
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && COMPARISON_P (*cc_use)
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1, tmp;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
        op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
        op0 = src, op1 = CONST0_RTX (GET_MODE (src));

      tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
                                           op0, op1);
      if (!tmp)
        new_code = old_code;
      else if (!CONSTANT_P (tmp))
        {
          new_code = GET_CODE (tmp);
          op0 = XEXP (tmp, 0);
          op1 = XEXP (tmp, 1);
        }
      else
        {
          rtx pat = PATTERN (other_insn);
          undobuf.other_insn = other_insn;
          SUBST (*cc_use, tmp);

          /* Attempt to simplify CC user.  */
          if (GET_CODE (pat) == SET)
            {
              rtx new_rtx = simplify_rtx (SET_SRC (pat));
              if (new_rtx != NULL_RTX)
                SUBST (SET_SRC (pat), new_rtx);
            }

          /* Convert X into a no-op move.  */
          SUBST (SET_DEST (x), pc_rtx);
          SUBST (SET_SRC (x), pc_rtx);
          return x;
        }

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (new_code, &op0, &op1);

#ifdef SELECT_CC_MODE
      /* If this machine has CC modes other than CCmode, check to see if we
         need to use a different CC mode here.  */
      if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
        compare_mode = GET_MODE (op0);
      else
        compare_mode = SELECT_CC_MODE (new_code, op0, op1);

#ifndef HAVE_cc0
      /* If the mode changed, we have to change SET_DEST, the mode in the
         compare, and the mode in the place SET_DEST is used.  If SET_DEST is
         a hard register, just build new versions with the proper mode.  If it
         is a pseudo, we lose unless it is the only time we set the pseudo, in
         which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
        {
          if (can_change_dest_mode (dest, 0, compare_mode))
            {
              unsigned int regno = REGNO (dest);
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (x), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              other_changed = 1;

              dest = new_dest;
            }
        }
#endif  /* cc0 */
#endif  /* SELECT_CC_MODE */

      /* If the code changed, we have to build a new comparison in
         undobuf.other_insn.  */
      if (new_code != old_code)
        {
          int other_changed_previously = other_changed;
          unsigned HOST_WIDE_INT mask;
          rtx old_cc_use = *cc_use;

          SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
                                          dest, const0_rtx));
          other_changed = 1;

          /* If the only change we made was to change an EQ into an NE or
             vice versa, OP0 has only one bit that might be nonzero, and OP1
             is zero, check if changing the user of the condition code will
             produce a valid insn.  If it won't, we can keep the original code
             in that insn by surrounding our operation with an XOR.  */

          if (((old_code == NE && new_code == EQ)
               || (old_code == EQ && new_code == NE))
              && ! other_changed_previously && op1 == const0_rtx
              && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
              && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
            {
              rtx pat = PATTERN (other_insn), note = 0;

              if ((recog_for_combine (&pat, other_insn, &note) < 0
                   && ! check_asm_operands (pat)))
                {
                  *cc_use = old_cc_use;
                  other_changed = 0;

                  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
                                             op0, GEN_INT (mask));
                }
            }
        }

      if (other_changed)
        undobuf.other_insn = other_insn;

      /* Otherwise, if we didn't previously have a COMPARE in the
         correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
      else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = SET_SRC (x);
        }
      /* Otherwise, update the COMPARE if needed.  */
      else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
         compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */
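  /* E.g., (set R:QI (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R) (plus:SI A B)) when QImode and SImode occupy
     the same number of words, since only the low byte of the result
     matters here.  */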
 
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && !OBJECT_P (SUBREG_REG (src))
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
           / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
               + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
      && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
            && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
                                         GET_MODE (SUBREG_REG (src)),
                                         GET_MODE (src)))
#endif
      && (REG_P (dest)
          || (GET_CODE (dest) == SUBREG
              && REG_P (SUBREG_REG (dest)))))
    {
      SUBST (SET_DEST (x),
             gen_lowpart (GET_MODE (SUBREG_REG (src)),
                                      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef HAVE_cc0
  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
     in SRC.  */
  if (dest == cc0_rtx
      && GET_CODE (src) == SUBREG
      && subreg_lowpart_p (src)
      && (GET_MODE_BITSIZE (GET_MODE (src))
          < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
    {
      rtx inner = SUBREG_REG (src);
      enum machine_mode inner_mode = GET_MODE (inner);

      /* Here we make sure that we don't have a sign bit on.  */
      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (inner, inner_mode)
              < ((unsigned HOST_WIDE_INT) 1
                 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
        {
          SUBST (SET_SRC (x), inner);
          src = SET_SRC (x);
        }
    }
#endif

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */
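  /* E.g., on a target where LOAD_EXTEND_OP (QImode) == ZERO_EXTEND,
     (set FOO (subreg:SI (mem:QI BAR) 0)) becomes
     (set FOO (zero_extend:SI (mem:QI BAR))).  */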
 
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (src)))
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
      && SUBREG_BYTE (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && MEM_P (SUBREG_REG (src)))
    {
      SUBST (SET_SRC (x),
             gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
                            GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */
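  /* E.g., with A known to be 0 or -1,
     (if_then_else (ne A 0) C1 C2) can be computed as
     (ior (and A C1) (and (not A) C2)).  */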
 
  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
                               GET_MODE (XEXP (XEXP (src, 0), 0)))
          == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
                      ? XEXP (src, 1) : XEXP (src, 2));
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
                   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true_rtx) == IOR
          && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
      else if (GET_CODE (true_rtx) == IOR
               && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;

      term2 = simplify_gen_binary (AND, GET_MODE (src),
                                   XEXP (XEXP (src, 0), 0), true_rtx);
      term3 = simplify_gen_binary (AND, GET_MODE (src),
                                   simplify_gen_unary (NOT, GET_MODE (src),
                                                       XEXP (XEXP (src, 0), 0),
                                                       GET_MODE (src)),
                                   false_rtx);

      SUBST (SET_SRC (x),
             simplify_gen_binary (IOR, GET_MODE (src),
                                  simplify_gen_binary (IOR, GET_MODE (src),
                                                       term1, term2),
                                  term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}

/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  */

static rtx
simplify_logical (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* We can call simplify_and_const_int only if we don't lose
         any (sign) bits when converting INTVAL (op1) to
         "unsigned HOST_WIDE_INT".  */
      if (CONST_INT_P (op1)
          && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              || INTVAL (op1) > 0))
        {
          x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
          if (GET_CODE (x) != AND)
            return x;

          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
        }

      /* If we have any of (and (ior A B) C) or (and (xor A B) C),
         apply the distributive law and then the inverse distributive
         law to see if things simplify.  */
      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }
      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    case IOR:
      /* If we have (ior (and A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      if (GET_CODE (op1) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    default:
      gcc_unreachable ();
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
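/* For instance, expand_compound_operation turns
   (sign_extract:SI X 8 0) into the shift pair
   (ashiftrt:SI (ashift:SI X 24) 24), while a ZERO_EXTEND or
   ZERO_EXTRACT typically ends up as an AND with a mask, e.g.
   (and:SI ... 255).  */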
 
static rtx
expand_compound_operation (rtx x)
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
         it depends on implicitly extending the value.
         Since we don't know the right way to extend it,
         we can't tell whether the implicit way is right.

         Even for a mode that is no wider than a const_int,
         we can't win, because we need to sign extend one of its bits through
         the rest of it, and we don't know which bit.  */
      if (CONST_INT_P (XEXP (x, 0)))
        return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
         (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
         because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
         reloaded.  If not for that, MEMs would very rarely be safe.

         Reject MODEs bigger than a word, because we might not be able
         to reference a two-register group starting with an arbitrary register
         (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
         is if it is an ASM_OPERANDS), we can't do anything since we don't
         know how much masking to do.  */
      if (len == 0)
        return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;

      /* ... fall through ...  */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        return XEXP (x, 0);

      if (!CONST_INT_P (XEXP (x, 1))
          || !CONST_INT_P (XEXP (x, 2))
          || GET_MODE (XEXP (x, 0)) == VOIDmode)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* This should stay within the object being extracted, fail otherwise.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
        return x;

      if (BITS_BIG_ENDIAN)
        pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to a cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                & ~(((unsigned HOST_WIDE_INT)
                      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
                     >> 1))
               == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (rtx_cost (x, SET, optimize_this_for_speed_p)
          > rtx_cost (temp2, SET, optimize_this_for_speed_p))
       return temp2;
      else if (rtx_cost (x, SET, optimize_this_for_speed_p)
               > rtx_cost (temp, SET, optimize_this_for_speed_p))
       return temp;
      else
       return x;
    }
 
  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
         know that the last value didn't have any inappropriate bits
         set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
         is a comparison and STORE_FLAG_VALUE permits.  This is like
         the first case, but it works even when GET_MODE (x) is larger
         than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && COMPARISON_P (XEXP (XEXP (x, 0), 0))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));
    }
 
  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */
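  /* E.g., for (zero_extract:SI X 8 4) this emits (ashift X 20)
     followed by (lshiftrt ... 24), which the shift simplifier
     typically reduces to (and (lshiftrt X 4) 255).  */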
 
  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth + len >= pos)
    {
      enum machine_mode mode = GET_MODE (x);
      tem = gen_lowpart (mode, XEXP (x, 0));
      if (!tem || GET_CODE (tem) == CLOBBER)
        return x;
      tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                  tem, modewidth - pos - len);
      tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
                                  mode, tem, modewidth - len);
    }
  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
                                  simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                        GET_MODE (x),
                                                        XEXP (x, 0), pos),
                                  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}
 
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGs).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */

static const_rtx
expand_field_assignment (const_rtx x)
{
  rtx inner;
  rtx pos;                      /* Always counts from low bit.  */
  int len;
  rtx mask, cleared, masked;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
          && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
        {
          inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
          len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
          pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
        }
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
               && CONST_INT_P (XEXP (SET_DEST (x), 1)))
        {
          inner = XEXP (SET_DEST (x), 0);
          len = INTVAL (XEXP (SET_DEST (x), 1));
          pos = XEXP (SET_DEST (x), 2);

          /* A constant position should stay within the width of INNER.  */
          if (CONST_INT_P (pos)
              && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
            break;

          if (BITS_BIG_ENDIAN)
            {
              if (CONST_INT_P (pos))
                pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
                               - INTVAL (pos));
              else if (GET_CODE (pos) == MINUS
                       && CONST_INT_P (XEXP (pos, 1))
                       && (INTVAL (XEXP (pos, 1))
                           == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
              else
                pos = simplify_gen_binary (MINUS, GET_MODE (pos),
                                           GEN_INT (GET_MODE_BITSIZE (
                                                    GET_MODE (inner))
                                                    - len),
                                           pos);
            }
        }
 
      /* A SUBREG between two modes that occupy the same number of words
         can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
               /* We need SUBREGs to compute nonzero_bits properly.  */
               && nonzero_sign_valid
               && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                        + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
        {
          x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
                           gen_lowpart
                           (GET_MODE (SUBREG_REG (SET_DEST (x))),
                            SET_SRC (x)));
          continue;
        }
      else
        break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
        inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
      if (! SCALAR_INT_MODE_P (compute_mode))
        {
          enum machine_mode imode;

          /* Don't do anything for vector or complex integral types.  */
          if (! FLOAT_MODE_P (compute_mode))
            break;

          /* Try to find an integral mode to pun with.  */
          imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
          if (imode == BLKmode)
            break;

          compute_mode = imode;
          inner = gen_lowpart (imode, inner);
        }

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
        break;

      /* Now compute the equivalent expression.  Make a copy of INNER
         for the SET_DEST in case it is a MEM into which we will substitute;
         we don't want shared RTL in that case.  */
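      /* E.g., with LEN == 8 and POS == 4 this computes
         (inner & ~(0xff << 4)) | ((src & 0xff) << 4).  */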
      mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      cleared = simplify_gen_binary (AND, compute_mode,
                                     simplify_gen_unary (NOT, compute_mode,
                                       simplify_gen_binary (ASHIFT,
                                                            compute_mode,
                                                            mask, pos),
                                       compute_mode),
                                     inner);
      masked = simplify_gen_binary (ASHIFT, compute_mode,
                                    simplify_gen_binary (
                                      AND, compute_mode,
                                      gen_lowpart (compute_mode, SET_SRC (x)),
                                      mask),
                                    pos);

      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
                       simplify_gen_binary (IOR, compute_mode,
                                            cleared, masked));
    }

  return x;
}
 
6620
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
6621
   it is an RTX that represents a variable starting position; otherwise,
6622
   POS is the (constant) starting bit position (counted from the LSB).
6623
 
6624
   UNSIGNEDP is nonzero for an unsigned reference and zero for a
6625
   signed reference.
6626
 
6627
   IN_DEST is nonzero if this is a reference in the destination of a
6628
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
6629
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6630
   be used.
6631
 
6632
   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
6633
   ZERO_EXTRACT should be built even for bits starting at bit 0.
6634
 
6635
   MODE is the desired mode of the result (if IN_DEST == 0).
6636
 
6637
   The result is an RTX for the extraction or NULL_RTX if the target
6638
   can't handle it.  */

static rtx
make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
                 int in_dest, int in_compare)
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  rtx new_rtx = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
         consider just the QI as the memory to extract from.
         The subreg adds or removes high bits; its mode is
         irrelevant to the meaning of this extraction,
         since POS and LEN count from the lsb.  */
      if (MEM_P (SUBREG_REG (inner)))
        is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }
  else if (GET_CODE (inner) == ASHIFT
           && CONST_INT_P (XEXP (inner, 1))
           && pos_rtx == 0 && pos == 0
           && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
    {
      /* We're extracting the least significant bits of an rtx
         (ashift X (const_int C)), where LEN > C.  Extract the
         least significant (LEN - C) bits of X, giving an rtx
         whose mode is MODE, then shift it left C times.  */
      new_rtx = make_extraction (mode, XEXP (inner, 0),
                             0, 0, len - INTVAL (XEXP (inner, 1)),
                             unsignedp, in_dest, in_compare);
      if (new_rtx != 0)
        return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && CONST_INT_P (pos_rtx))
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  */
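  /* A purely illustrative case: extracting the low 16 bits (POS == 0,
     LEN == 16) of a 32-bit register needs no extraction at all on a
     target where that truncation is a no-op; the HImode lowpart of the
     register already is the field.  */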

  if (tmode != BLKmode
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
           && !MEM_P (inner)
           && (inner_mode == tmode
               || !REG_P (inner)
               || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
                                         GET_MODE_BITSIZE (inner_mode))
               || reg_truncated_to_mode (tmode, inner))
           && (! in_dest
               || (REG_P (inner)
                   && have_insn_for (STRICT_LOW_PART, tmode))))
          || (MEM_P (inner) && pos_rtx == 0
              && (pos
                  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
                     : BITS_PER_UNIT)) == 0
              /* We can't do this if we are widening INNER_MODE (it
                 may not be aligned, for one thing).  */
              && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
              && (inner_mode == tmode
                  || (! mode_dependent_address_p (XEXP (inner, 0))
                      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
         field.  If the original and current mode are the same, we need not
         adjust the offset.  Otherwise, we do if bytes big endian.

         If INNER is not a MEM, get a piece consisting of just the field
         of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (MEM_P (inner))
        {
          HOST_WIDE_INT offset;

          /* POS counts from lsb, but make OFFSET count in memory order.  */
          if (BYTES_BIG_ENDIAN)
            offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
          else
            offset = pos / BITS_PER_UNIT;

          new_rtx = adjust_address_nv (inner, tmode, offset);
        }
      else if (REG_P (inner))
        {
          if (tmode != inner_mode)
            {
              /* We can't call gen_lowpart in a DEST since we
                 always want a SUBREG (see below) and it would sometimes
                 return a new hard register.  */
              if (pos || in_dest)
                {
                  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

                  if (WORDS_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
                    final_word = ((GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode))
                                  / UNITS_PER_WORD) - final_word;

                  final_word *= UNITS_PER_WORD;
                  if (BYTES_BIG_ENDIAN &&
                      GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
                    final_word += (GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

                  /* Avoid creating invalid subregs, for example when
                     simplifying (x>>32)&255.  */
                  if (!validate_subreg (tmode, inner_mode, inner, final_word))
                    return NULL_RTX;

                  new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
                }
              else
                new_rtx = gen_lowpart (tmode, inner);
            }
          else
            new_rtx = inner;
        }
      else
        new_rtx = force_to_mode (inner, tmode,
                             len >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                             0);

      /* If this extraction is going into the destination of a SET,
         make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
        return (MEM_P (new_rtx) ? new_rtx
                : (GET_CODE (new_rtx) != SUBREG
                   ? gen_rtx_CLOBBER (tmode, const0_rtx)
                   : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));

      if (mode == tmode)
        return new_rtx;

      if (CONST_INT_P (new_rtx)
          || GET_CODE (new_rtx) == CONST_DOUBLE)
        return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                                         mode, new_rtx, tmode);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         sign and zero extension, which are equivalent in these cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (new_rtx, tmode)
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (tmode))
                       >> 1))
                  == 0)))
        {
          rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
          rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (rtx_cost (temp, SET, optimize_this_for_speed_p)
              <= rtx_cost (temp1, SET, optimize_this_for_speed_p))
            return temp;
          return temp1;
        }

      /* Otherwise, sign- or zero-extend unless we already are in the
         proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                             mode, new_rtx));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && unsignedp)
    return 0;

  /* If INNER is a MEM, reject this if we would be spanning bytes or
     if the position is not a constant and the length is not 1.  In all
     other cases, we would only be going outside our object in cases when
     an original shift would have been undefined.  */
  if (MEM_P (inner)
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
          || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is the preferred mode
     for an extraction pattern's first input operand, or word_mode if there
     is none.  */
  if (!MEM_P (inner))
    wanted_inner_mode = wanted_inner_reg_mode;
  else
    {
      /* Be careful not to go beyond the extracted object and maintain the
         natural alignment of the memory.  */
      wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
      while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
             > GET_MODE_BITSIZE (wanted_inner_mode))
        {
          wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
          gcc_assert (wanted_inner_mode != VOIDmode);
        }

      /* If we have to change the mode of memory and cannot, the desired mode
         is EXTRACTION_MODE.  */
      if (inner_mode != wanted_inner_mode
          && (mode_dependent_address_p (XEXP (inner, 0))
              || MEM_VOLATILE_P (inner)
              || pos_rtx))
        wanted_inner_mode = extraction_mode;
    }

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
         BITS_BIG_ENDIAN style.  If position is constant, compute new
         position.  Otherwise, build subtraction.
         Note that POS is relative to the mode of the original argument.
         If it's a MEM we need to recompute POS relative to that.
         However, if we're extracting from (or inserting into) a register,
         we want to recompute POS relative to wanted_inner_mode.  */
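      /* Illustration (invented values): with a 32-bit WIDTH, LEN == 8 and
         a constant POS == 4 counted from the LSB, the BITS_BIG_ENDIAN
         position becomes 32 - 8 - 4 == 20.  */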
      int width = (MEM_P (inner)
                   ? GET_MODE_BITSIZE (is_mode)
                   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
        pos = width - len - pos;
      else
        pos_rtx
          = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
         Note that it can only be less than 0 if !MEM_P (inner).  */
    }

  /* If INNER has a wider mode, and this is a constant extraction, try to
     make it smaller and adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && inner_mode != wanted_inner_mode
      && ! pos_rtx
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && MEM_P (inner)
      && ! mode_dependent_address_p (XEXP (inner, 0))
      && ! MEM_VOLATILE_P (inner))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
         endian in both bits and bytes or little endian in bits and bytes.
         If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
         adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* We can now move to the desired byte.  */
      offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
                * GET_MODE_SIZE (wanted_inner_mode);
      pos %= GET_MODE_BITSIZE (wanted_inner_mode);

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
          && is_mode != wanted_inner_mode)
        offset = (GET_MODE_SIZE (is_mode)
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, get it into the proper mode.  If we are changing
     its mode, POS must be a constant and smaller than the size of the new
     mode.  */
  else if (!MEM_P (inner))
    {
      /* On the LHS, don't create paradoxical subregs implicitly truncating
         the register unless TRULY_NOOP_TRUNCATION.  */
      if (in_dest
          && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (inner)),
                                     GET_MODE_BITSIZE (wanted_inner_mode)))
        return NULL_RTX;

      if (GET_MODE (inner) != wanted_inner_mode
          && (pos_rtx != 0
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
        return NULL_RTX;

      if (orig_pos < 0)
        return NULL_RTX;

      inner = force_to_mode (inner, wanted_inner_mode,
                             pos_rtx
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
                                << orig_pos),
                             0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extension to the cheaper of
         SIGN_EXTEND and ZERO_EXTEND, which are equivalent in these
         cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (GET_MODE (pos_rtx)))
                       >> 1))
                  == 0)))
        {
          rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (rtx_cost (temp1, SET, optimize_this_for_speed_p)
              < rtx_cost (temp, SET, optimize_this_for_speed_p))
            temp = temp1;
        }
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new_rtx = gen_lowpart (mode, new_rtx);

  return new_rtx;
}

/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
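/* An invented example: with COUNT == 3,
   (plus (ashift X (const_int 3)) (const_int 8)) commutes to
   (ashift (plus X (const_int 1)) (const_int 3)), so we would return
   (plus X (const_int 1)).  */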

static rtx
extract_left_shift (rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (CONST_INT_P (XEXP (x, 1))
          && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_binary (code, mode, tem,
                                    GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

static rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i, j;
  rtx new_rtx = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
               : ((code == COMPARE || COMPARISON_P (x))
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW_RTX is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
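      /* E.g. inside an address, (ashift X (const_int 2)) becomes
         (mult X (const_int 4)), the canonical form in addresses.  */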
      if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (x, 0), next_code);
          new_rtx = gen_rtx_MULT (mode, new_rtx,
                              GEN_INT ((HOST_WIDE_INT) 1
                                       << INTVAL (XEXP (x, 1))));
        }
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (!CONST_INT_P (XEXP (x, 1)))
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
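      /* For example, (and (lshiftrt X (const_int 8)) (const_int 255))
         selects bits 8..15 of X, i.e. the extraction
         (zero_extract X (const_int 8) (const_int 8)).  */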
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                             XEXP (x, 1)),
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                             XEXP (x, 1)));
          new_rtx = make_compound_operation (new_rtx, in_code);
        }

      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */
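      /* Illustration (invented values): in SImode,
         (and (rotate X (const_int 24)) (const_int 255)) picks up the eight
         bits that started at position 32 - 24 == 8, so it is
         (zero_extract X (const_int 8) (const_int 8)).  */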

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
               && CONST_INT_P (XEXP (XEXP (x, 0), 1))
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
        {
          new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new_rtx = make_extraction (mode, new_rtx,
                                 (GET_MODE_BITSIZE (mode)
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
        }

      /* On machines without logical shifts, if the operand of the AND is
         a logical shift and our mask turns off all the propagated sign
         bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
               && !have_insn_for (LSHIFTRT, mode)
               && have_insn_for (ASHIFTRT, mode)
               && CONST_INT_P (XEXP (XEXP (x, 0), 1))
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
               && mode_width <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
            SUBST (XEXP (x, 0),
                   gen_rtx_ASHIFTRT (mode,
                                     make_compound_operation
                                     (XEXP (XEXP (x, 0), 0), next_code),
                                     XEXP (XEXP (x, 0), 1)));
        }

      /* If the constant is one less than a power of two, this might be
         representable by an extraction even if no shift is present.
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
         we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        new_rtx = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
         convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
               && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
        new_rtx = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
          && ! have_insn_for (LSHIFTRT, mode)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
        {
          new_rtx = gen_rtx_ASHIFTRT (mode,
                                  make_compound_operation (XEXP (x, 0),
                                                           next_code),
                                  XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
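      /* A concrete (invented) example: in SImode,
         (ashiftrt (ashift FOO (const_int 24)) (const_int 24)) extracts the
         low LEN == 32 - 24 == 8 bits of FOO at POS == 24 - 24 == 0 with
         sign extension, i.e. a SIGN_EXTRACT of the low byte.  */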
      if (CONST_INT_P (rhs)
          && GET_CODE (lhs) == ASHIFT
          && CONST_INT_P (XEXP (lhs, 1))
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
          && INTVAL (rhs) < mode_width)
        {
          new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
          new_rtx = make_extraction (mode, new_rtx,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
          break;
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
          && ! (GET_CODE (lhs) == SUBREG
                && (OBJECT_P (SUBREG_REG (lhs))))
          && CONST_INT_P (rhs)
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && INTVAL (rhs) < mode_width
          && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);

      {
        rtx simplified = simplify_subreg (mode, tem, GET_MODE (SUBREG_REG (x)),
                                          SUBREG_BYTE (x));

        if (simplified)
          tem = simplified;

        if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
            && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
            && subreg_lowpart_p (x))
          {
            rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
                                       0);

            /* If we have something other than a SUBREG, we might have
               done an expansion, so rerun ourselves.  */
            if (GET_CODE (newer) != SUBREG)
              newer = make_compound_operation (newer, in_code);

            /* force_to_mode can expand compounds.  If it just re-expanded
               the compound, use gen_lowpart instead to convert to the
               desired mode.  */
            if (rtx_equal_p (newer, x))
              return gen_lowpart (GET_MODE (x), tem);

            return newer;
          }

        if (simplified)
          return tem;
      }
      break;

    default:
      break;
    }

  if (new_rtx)
    {
      x = gen_lowpart (mode, new_rtx);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new_rtx = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new_rtx);
      }
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        {
          new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
          SUBST (XVECEXP (x, i, j), new_rtx);
        }

  /* If this is a commutative operation, the changes to the operands
     may have made it noncanonical.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      tem = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), tem);
    }

  return x;
}

/* Given M, see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
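/* Illustration (values invented): M == 0x78 (binary 1111000) selects a
   4-bit field starting at bit 3, so we return 3 and set *PLEN to 4;
   M == 0x50 (binary 1010000) selects no contiguous field, so we
   return -1.  */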

static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}

/* If X refers to a register that equals REG in value, replace these
   references with REG.  */
static rtx
canon_reg_for_combine (rtx x, rtx reg)
{
  rtx op0, op1, op2;
  const char *fmt;
  int i;
  bool copied;

  enum rtx_code code = GET_CODE (x);
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      if (op0 != XEXP (x, 0))
        return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
                                   GET_MODE (reg));
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
                                        GET_MODE (op0), op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
        return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
                                     GET_MODE (op0), op0, op1, op2);
      break;

    case RTX_OBJ:
      if (REG_P (x))
        {
          if (rtx_equal_p (get_last_value (reg), x)
              || rtx_equal_p (reg, get_last_value (x)))
            return reg;
          else
            break;
        }

      /* fall through */

    default:
      fmt = GET_RTX_FORMAT (code);
      copied = false;
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (fmt[i] == 'e')
          {
            rtx op = canon_reg_for_combine (XEXP (x, i), reg);
            if (op != XEXP (x, i))
              {
                if (!copied)
                  {
                    copied = true;
                    x = copy_rtx (x);
                  }
                XEXP (x, i) = op;
              }
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              {
                rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
                if (op != XVECEXP (x, i, j))
                  {
                    if (!copied)
                      {
                        copied = true;
                        x = copy_rtx (x);
                      }
                    XVECEXP (x, i, j) = op;
                  }
              }
          }

      break;
    }

  return x;
}

/* Return X converted to MODE.  If the value is already truncated to
   MODE we can just return a subreg even though in the general case we
   would need an explicit truncation.  */

static rtx
gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x)
      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
      && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                 GET_MODE_BITSIZE (GET_MODE (x)))
      && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
    {
      /* Bit-cast X into an integer mode.  */
      if (!SCALAR_INT_MODE_P (GET_MODE (x)))
        x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x);
      x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode),
                              x, GET_MODE (x));
    }

  return gen_lowpart (mode, x);
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
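/* As a purely illustrative example: with MODE == QImode and MASK == 0xff,
   applying force_to_mode to (and:SI X (const_int 255)) can drop the AND
   altogether, since only the low eight bits will ever be looked at, and
   return just the QImode lowpart of X.  */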

static rtx
force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && have_insn_for (code, mode))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
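  /* E.g. (values invented): MASK == 0x14 (binary 10100) yields
     FULLER_MASK == 0x1f, since a carry into bit 2 or bit 4 can originate
     in any of the bits below them.  */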
  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
  else
    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
                   - 1);

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
    x = const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (CONST_INT_P (x))
    {
      if (SCALAR_INT_MODE_P (mode))
        return gen_int_mode (INTVAL (x) & mask, mode);
      else
        {
          x = GEN_INT (INTVAL (x) & mask);
          return gen_lowpart_common (mode, x);
        }
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart (mode, x);

  /* We can ignore the effect of a SUBREG if it narrows the mode or
     if the constant masks to zero all the bits the mode doesn't have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && ((GET_MODE_SIZE (GET_MODE (x))
           < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          || (0 == (mask
                    & GET_MODE_MASK (GET_MODE (x))
                    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
    return force_to_mode (SUBREG_REG (x), mode, mask, next_select);

  /* The arithmetic simplifications here only work for scalar integer modes.  */
  if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return gen_lowpart_or_truncate (mode, x);

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, next_select);
      break;

    case TRUNCATE:
      /* Similarly for a truncate.  */
      return force_to_mode (XEXP (x, 0), mode, mask, next_select);

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
              && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
                  == mask))
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = simplify_gen_binary (AND, GET_MODE (x),
                                       XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET, optimize_this_for_speed_p)
                  < rtx_cost (x, SET, optimize_this_for_speed_p))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */
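      /* A concrete (invented) case: in (and (plus FOO 11) -4), if the low
         two bits of FOO are known to be zero, C1 == 11 can be masked to 8,
         giving (and (plus FOO 8) -4); the PLUS no longer disturbs the bits
         that M keeps, and the AND can later be dropped.  */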

      {
        unsigned int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (CONST_INT_P (XEXP (x, 1))
            && exact_log2 (- smask) >= 0
            && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
            && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
          return force_to_mode (plus_constant (XEXP (x, 0),
                                               (INTVAL (XEXP (x, 1)) & smask)),
                                mode, smask, next_select);
      }

      /* ... fall through ...  */

    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case MINUS:
      /* If X is (minus C Y) where C's least set bit is larger than any bit
         in the mask, then we may replace with (neg Y).  */
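      /* For instance (invented values): (minus (const_int 16) Y) under
         MASK == 0xf acts exactly like (neg Y), because 16 contributes
         nothing below bit 4: (16 - Y) == -Y (mod 16).  */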
      if (CONST_INT_P (XEXP (x, 0))
          && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
                                        & -INTVAL (XEXP (x, 0))))
              > mask))
        {
          x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
                                  GET_MODE (x));
          return force_to_mode (x, mode, mask, next_select);
        }

      /* Similarly, if C contains every bit in the fuller_mask, then we may
         replace with (not Y).  */
      if (CONST_INT_P (XEXP (x, 0))
          && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
              == INTVAL (XEXP (x, 0))))
        {
          x = simplify_gen_unary (NOT, GET_MODE (x),
                                  XEXP (x, 1), GET_MODE (x));
          return force_to_mode (x, mode, mask, next_select);
        }

      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && CONST_INT_P (XEXP (x, 1))
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
                                   XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, next_select);
        }

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
      op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);

      /* If we ended up truncating both operands, truncate the result of the
         operation instead.  */
      if (GET_CODE (op0) == TRUNCATE
          && GET_CODE (op1) == TRUNCATE)
        {
          op0 = XEXP (op0, 0);
          op1 = XEXP (op1, 0);
        }

      op0 = gen_lowpart_or_truncate (op_mode, op0);
      op1 = gen_lowpart_or_truncate (op_mode, op1);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (CONST_INT_P (XEXP (x, 1))
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);
          unsigned HOST_WIDE_INT inner_mask;

          /* Select the mask of the bits we need for the shift operand.  */
          inner_mask = mask << INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and INNER_MASK is no wider than the
             width of X's mode.  */
          if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, inner_mask, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */
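      /* A concrete (invented) instance: in SImode, if the top eight bits
         of FOO are known to be copies of its sign bit, then under
         MASK == 0xf (lshiftrt FOO (const_int 24)) can become
         (lshiftrt FOO (const_int 28)); either way the four bits selected
         by MASK are just sign-bit copies.  */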
7881
 
7882
      if (GET_CODE (x) == LSHIFTRT
7883
          && CONST_INT_P (XEXP (x, 1))
7884
          /* The shift puts one of the sign bit copies in the least significant
7885
             bit.  */
7886
          && ((INTVAL (XEXP (x, 1))
7887
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7888
              >= GET_MODE_BITSIZE (GET_MODE (x)))
7889
          && exact_log2 (mask + 1) >= 0
7890
          /* Number of bits left after the shift must be more than the mask
7891
             needs.  */
7892
          && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7893
              <= GET_MODE_BITSIZE (GET_MODE (x)))
7894
          /* Must be more sign bit copies than the mask needs.  */
7895
          && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7896
              >= exact_log2 (mask + 1)))
7897
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7898
                                 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7899
                                          - exact_log2 (mask + 1)));
7900
 
7901
      goto shiftrt;
7902
 
7903
    case ASHIFTRT:
7904
      /* If we are just looking for the sign bit, we don't need this shift at
7905
         all, even if it has a variable count.  */
7906
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7907
          && (mask == ((unsigned HOST_WIDE_INT) 1
7908
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7909
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7910
 
7911
      /* If this is a shift by a constant, get a mask that contains those bits
7912
         that are not copies of the sign bit.  We then have two cases:  If
7913
         MASK only includes those bits, this can be a logical shift, which may
7914
         allow simplifications.  If MASK is a single-bit field not within
7915
         those bits, we are requesting a copy of the sign bit and hence can
7916
         shift the sign bit to the appropriate location.  */
7917
 
7918
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
7919
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7920
        {
7921
          int i;
7922
 
7923
          /* If the considered data is wider than HOST_WIDE_INT, we can't
7924
             represent a mask for all its bits in a single scalar.
7925
             But we only care about the lower bits, so calculate these.  */
7926
 
7927
          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
7928
            {
7929
              nonzero = ~(HOST_WIDE_INT) 0;
7930
 
7931
              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7932
                 is the number of bits a full-width mask would have set.
7933
                 We need only shift if these are fewer than nonzero can
7934
                 hold.  If not, we must keep all bits set in nonzero.  */
7935
 
7936
              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7937
                  < HOST_BITS_PER_WIDE_INT)
7938
                nonzero >>= INTVAL (XEXP (x, 1))
7939
                            + HOST_BITS_PER_WIDE_INT
7940
                            - GET_MODE_BITSIZE (GET_MODE (x)) ;
7941
            }
7942
          else
7943
            {
7944
              nonzero = GET_MODE_MASK (GET_MODE (x));
7945
              nonzero >>= INTVAL (XEXP (x, 1));
7946
            }
7947
 
7948
          if ((mask & ~nonzero) == 0)
7949
            {
7950
              x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
7951
                                        XEXP (x, 0), INTVAL (XEXP (x, 1)));
7952
              if (GET_CODE (x) != ASHIFTRT)
7953
                return force_to_mode (x, mode, mask, next_select);
7954
            }
7955
 
7956
          else if ((i = exact_log2 (mask)) >= 0)
7957
            {
7958
              x = simplify_shift_const
7959
                  (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7960
                   GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
7961
 
7962
              if (GET_CODE (x) != ASHIFTRT)
7963
                return force_to_mode (x, mode, mask, next_select);
7964
            }
7965
        }
7966
 
7967
      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
7968
         even if the shift count isn't a constant.  */
7969
      if (mask == 1)
7970
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7971
                                 XEXP (x, 0), XEXP (x, 1));
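      /* A standalone check (not part of combine.c) of the MASK == 1
         rewrite just above: the low bit of an arithmetic right shift
         equals the low bit of a logical right shift by the same, possibly
         variable, count.  Assumes the usual arithmetic behaviour of >>
         on negative signed values.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t x;
  int c;
  for (x = -1000; x <= 1000; x++)
    for (c = 0; c < 32; c++)
      assert ((((uint32_t) (x >> c)) & 1) == ((((uint32_t) x) >> c) & 1));
  return 0;
}
#endif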
7972
 
7973
    shiftrt:
7974
 
7975
      /* If this is a zero- or sign-extension operation that just affects bits
7976
         we don't care about, remove it.  Be sure the call above returned
7977
         something that is still a shift.  */
7978
 
7979
      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
7980
          && CONST_INT_P (XEXP (x, 1))
7981
          && INTVAL (XEXP (x, 1)) >= 0
7982
          && (INTVAL (XEXP (x, 1))
7983
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
7984
          && GET_CODE (XEXP (x, 0)) == ASHIFT
7985
          && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
7986
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
7987
                              next_select);
7988
 
7989
      break;
7990
 
7991
    case ROTATE:
7992
    case ROTATERT:
7993
      /* If the shift count is constant and we can do computations
7994
         in the mode of X, compute where the bits we care about are.
7995
         Otherwise, we can't do anything.  Don't change the mode of
7996
         the shift or propagate MODE into the shift, though.  */
7997
      if (CONST_INT_P (XEXP (x, 1))
7998
          && INTVAL (XEXP (x, 1)) >= 0)
7999
        {
8000
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
8001
                                            GET_MODE (x), GEN_INT (mask),
8002
                                            XEXP (x, 1));
8003
          if (temp && CONST_INT_P (temp))
8004
            SUBST (XEXP (x, 0),
8005
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
8006
                                  INTVAL (temp), next_select));
8007
        }
8008
      break;
8009
 
8010
    case NEG:
8011
      /* If we just want the low-order bit, the NEG isn't needed since it
8012
         won't change the low-order bit.  */
8013
      if (mask == 1)
8014
        return force_to_mode (XEXP (x, 0), mode, mask, just_select);
8015
 
8016
      /* We need any bits less significant than the most significant bit in
8017
         MASK since carries from those bits will affect the bits we are
8018
         interested in.  */
8019
      mask = fuller_mask;
8020
      goto unop;
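      /* Standalone illustration (not part of combine.c): negation never
         changes the low-order bit, so the NEG can go when MASK == 1; but
         higher bits of -X depend on lower bits of X through the borrow
         chain, which is why FULLER_MASK is needed otherwise.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned x;
  for (x = 0; x < 100000; x++)
    assert (((0u - x) & 1) == (x & 1));
  /* Bit 1 of -X depends on bit 0 of X: 2 and 3 differ only in bit 0,
     yet bit 1 of their negations differs.  */
  assert (((0u - 2u) & 2) != ((0u - 3u) & 2));
  return 0;
}
#endif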
8021
 
8022
    case NOT:
8023
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
8024
         same as the XOR case above.  Ensure that the constant we form is not
8025
         wider than the mode of X.  */
8026
 
8027
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
8028
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
8029
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8030
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
8031
              < GET_MODE_BITSIZE (GET_MODE (x)))
8032
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8033
        {
8034
          temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
8035
                               GET_MODE (x));
8036
          temp = simplify_gen_binary (XOR, GET_MODE (x),
8037
                                      XEXP (XEXP (x, 0), 0), temp);
8038
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
8039
                                   temp, XEXP (XEXP (x, 0), 1));
8040
 
8041
          return force_to_mode (x, mode, mask, next_select);
8042
        }
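      /* Standalone check (not part of combine.c) of the rewrite above: on
         the bits selected by MASK, (not (lshiftrt X C)) agrees with
         (lshiftrt (xor X (MASK << C)) C).  MASK and C are arbitrary
         choices here.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t x, mask = 0xff;
  int c = 4;
  for (x = 0; x < 200000; x++)
    assert ((~(x >> c) & mask) == (((x ^ (mask << c)) >> c) & mask));
  return 0;
}
#endif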
8043
 
8044
      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
8045
         use the full mask inside the NOT.  */
8046
      mask = fuller_mask;
8047
 
8048
    unop:
8049
      op0 = gen_lowpart_or_truncate (op_mode,
8050
                                     force_to_mode (XEXP (x, 0), mode, mask,
8051
                                                    next_select));
8052
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
8053
        x = simplify_gen_unary (code, op_mode, op0, op_mode);
8054
      break;
8055
 
8056
    case NE:
8057
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
8058
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
8059
         which is equal to STORE_FLAG_VALUE.  */
8060
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
8061
          && GET_MODE (XEXP (x, 0)) == mode
8062
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
8063
          && (nonzero_bits (XEXP (x, 0), mode)
8064
              == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
8065
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);
8066
 
8067
      break;
8068
 
8069
    case IF_THEN_ELSE:
8070
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
8071
         written in a narrower mode.  We play it safe and do not do so.  */
8072
 
8073
      SUBST (XEXP (x, 1),
8074
             gen_lowpart_or_truncate (GET_MODE (x),
8075
                                      force_to_mode (XEXP (x, 1), mode,
8076
                                                     mask, next_select)));
8077
      SUBST (XEXP (x, 2),
8078
             gen_lowpart_or_truncate (GET_MODE (x),
8079
                                      force_to_mode (XEXP (x, 2), mode,
8080
                                                     mask, next_select)));
8081
      break;
8082
 
8083
    default:
8084
      break;
8085
    }
8086
 
8087
  /* Ensure we return a value of the proper mode.  */
8088
  return gen_lowpart_or_truncate (mode, x);
8089
}
8090
 
8091
/* Return nonzero if X is an expression that has one of two values depending on
8092
   whether some other value is zero or nonzero.  In that case, we return the
8093
   value that is being tested, *PTRUE is set to the value if the rtx being
8094
   returned has a nonzero value, and *PFALSE is set to the other alternative.
8095
 
8096
   If we return zero, we set *PTRUE and *PFALSE to X.  */
8097
 
8098
static rtx
8099
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
8100
{
8101
  enum machine_mode mode = GET_MODE (x);
8102
  enum rtx_code code = GET_CODE (x);
8103
  rtx cond0, cond1, true0, true1, false0, false1;
8104
  unsigned HOST_WIDE_INT nz;
8105
 
8106
  /* If we are comparing a value against zero, we are done.  */
8107
  if ((code == NE || code == EQ)
8108
      && XEXP (x, 1) == const0_rtx)
8109
    {
8110
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
8111
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
8112
      return XEXP (x, 0);
8113
    }
8114
 
8115
  /* If this is a unary operation whose operand has one of two values, apply
8116
     our opcode to compute those values.  */
8117
  else if (UNARY_P (x)
8118
           && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
8119
    {
8120
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
8121
      *pfalse = simplify_gen_unary (code, mode, false0,
8122
                                    GET_MODE (XEXP (x, 0)));
8123
      return cond0;
8124
    }
8125
 
8126
  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
8127
     make can't possibly match and would suppress other optimizations.  */
8128
  else if (code == COMPARE)
8129
    ;
8130
 
8131
  /* If this is a binary operation, see if either side has only one of two
8132
     values.  If either one does or if both do and they are conditional on
8133
     the same value, compute the new true and false values.  */
8134
  else if (BINARY_P (x))
8135
    {
8136
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
8137
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
8138
 
8139
      if ((cond0 != 0 || cond1 != 0)
8140
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
8141
        {
8142
          /* If if_then_else_cond returned zero, then true/false are the
8143
             same rtl.  We must copy one of them to prevent invalid rtl
8144
             sharing.  */
8145
          if (cond0 == 0)
8146
            true0 = copy_rtx (true0);
8147
          else if (cond1 == 0)
8148
            true1 = copy_rtx (true1);
8149
 
8150
          if (COMPARISON_P (x))
8151
            {
8152
              *ptrue = simplify_gen_relational (code, mode, VOIDmode,
8153
                                                true0, true1);
8154
              *pfalse = simplify_gen_relational (code, mode, VOIDmode,
8155
                                                 false0, false1);
8156
            }
8157
          else
8158
            {
8159
              *ptrue = simplify_gen_binary (code, mode, true0, true1);
8160
              *pfalse = simplify_gen_binary (code, mode, false0, false1);
8161
            }
8162
 
8163
          return cond0 ? cond0 : cond1;
8164
        }
8165
 
8166
      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
8167
         operands is zero when the other is nonzero, and vice-versa,
8168
         and STORE_FLAG_VALUE is 1 or -1.  */
8169
 
8170
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8171
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
8172
              || code == UMAX)
8173
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8174
        {
8175
          rtx op0 = XEXP (XEXP (x, 0), 1);
8176
          rtx op1 = XEXP (XEXP (x, 1), 1);
8177
 
8178
          cond0 = XEXP (XEXP (x, 0), 0);
8179
          cond1 = XEXP (XEXP (x, 1), 0);
8180
 
8181
          if (COMPARISON_P (cond0)
8182
              && COMPARISON_P (cond1)
8183
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
8184
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8185
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8186
                  || ((swap_condition (GET_CODE (cond0))
8187
                       == reversed_comparison_code (cond1, NULL))
8188
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8189
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8190
              && ! side_effects_p (x))
8191
            {
8192
              *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
8193
              *pfalse = simplify_gen_binary (MULT, mode,
8194
                                             (code == MINUS
8195
                                              ? simplify_gen_unary (NEG, mode,
8196
                                                                    op1, mode)
8197
                                              : op1),
8198
                                              const_true_rtx);
8199
              return cond0;
8200
            }
8201
        }
8202
 
8203
      /* Similarly for MULT, AND and UMIN, except that for these the result
8204
         is always zero.  */
8205
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8206
          && (code == MULT || code == AND || code == UMIN)
8207
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8208
        {
8209
          cond0 = XEXP (XEXP (x, 0), 0);
8210
          cond1 = XEXP (XEXP (x, 1), 0);
8211
 
8212
          if (COMPARISON_P (cond0)
8213
              && COMPARISON_P (cond1)
8214
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
8215
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8216
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8217
                  || ((swap_condition (GET_CODE (cond0))
8218
                       == reversed_comparison_code (cond1, NULL))
8219
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8220
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8221
              && ! side_effects_p (x))
8222
            {
8223
              *ptrue = *pfalse = const0_rtx;
8224
              return cond0;
8225
            }
8226
        }
8227
    }
8228
 
8229
  else if (code == IF_THEN_ELSE)
8230
    {
8231
      /* If we have IF_THEN_ELSE already, extract the condition and
8232
         canonicalize it if it is NE or EQ.  */
8233
      cond0 = XEXP (x, 0);
8234
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
8235
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
8236
        return XEXP (cond0, 0);
8237
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
8238
        {
8239
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
8240
          return XEXP (cond0, 0);
8241
        }
8242
      else
8243
        return cond0;
8244
    }
8245
 
8246
  /* If X is a SUBREG, we can narrow both the true and false values
8247
     of the inner expression, if there is a condition.  */
8248
  else if (code == SUBREG
8249
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
8250
                                               &true0, &false0)))
8251
    {
8252
      true0 = simplify_gen_subreg (mode, true0,
8253
                                   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8254
      false0 = simplify_gen_subreg (mode, false0,
8255
                                    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8256
      if (true0 && false0)
8257
        {
8258
          *ptrue = true0;
8259
          *pfalse = false0;
8260
          return cond0;
8261
        }
8262
    }
8263
 
8264
  /* If X is a constant, this isn't special and will cause confusion
8265
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
8266
  else if (CONSTANT_P (x)
8267
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
8268
    ;
8269
 
8270
  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
8271
     will be least confusing to the rest of the compiler.  */
8272
  else if (mode == BImode)
8273
    {
8274
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
8275
      return x;
8276
    }
8277
 
8278
  /* If X is known to be either 0 or -1, those are the true and
8279
     false values when testing X.  */
8280
  else if (x == constm1_rtx || x == const0_rtx
8281
           || (mode != VOIDmode
8282
               && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
8283
    {
8284
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
8285
      return x;
8286
    }
8287
 
8288
  /* Likewise for 0 or a single bit.  */
8289
  else if (SCALAR_INT_MODE_P (mode)
8290
           && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8291
           && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
8292
    {
8293
      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
8294
      return x;
8295
    }
8296
 
8297
  /* Otherwise fail; show no condition with true and false values the same.  */
8298
  *ptrue = *pfalse = x;
8299
  return 0;
8300
}
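/* A standalone sketch (not part of combine.c) of the PLUS-of-MULTs case
   above, using C's comparison results to model STORE_FLAG_VALUE == 1:
   when the two multiplications are guarded by opposite conditions, the
   sum has exactly two values, OP0 when the condition holds and OP1 when
   it does not, which is what if_then_else_cond reports.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, b = 7, op0 = 21, op1 = 42;
  for (a = 0; a < 16; a++)
    {
      int x = (a != b) * op0 + (a == b) * op1;
      assert (x == (a != b ? op0 : op1));
    }
  return 0;
}
#endif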
8301
 
8302
/* Return the value of expression X given the fact that condition COND
8303
   is known to be true when applied to REG as its first operand and VAL
8304
   as its second.  X is known to not be shared and so can be modified in
8305
   place.
8306
 
8307
   We only handle the simplest cases, and specifically those cases that
8308
   arise with IF_THEN_ELSE expressions.  */
8309
 
8310
static rtx
8311
known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
8312
{
8313
  enum rtx_code code = GET_CODE (x);
8314
  rtx temp;
8315
  const char *fmt;
8316
  int i, j;
8317
 
8318
  if (side_effects_p (x))
8319
    return x;
8320
 
8321
  /* If either operand of the condition is a floating point value,
8322
     then we have to avoid collapsing an EQ comparison.  */
8323
  if (cond == EQ
8324
      && rtx_equal_p (x, reg)
8325
      && ! FLOAT_MODE_P (GET_MODE (x))
8326
      && ! FLOAT_MODE_P (GET_MODE (val)))
8327
    return val;
8328
 
8329
  if (cond == UNEQ && rtx_equal_p (x, reg))
8330
    return val;
8331
 
8332
  /* If X is (abs REG) and we know something about REG's relationship
8333
     with zero, we may be able to simplify this.  */
8334
 
8335
  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
8336
    switch (cond)
8337
      {
8338
      case GE:  case GT:  case EQ:
8339
        return XEXP (x, 0);
8340
      case LT:  case LE:
8341
        return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
8342
                                   XEXP (x, 0),
8343
                                   GET_MODE (XEXP (x, 0)));
8344
      default:
8345
        break;
8346
      }
8347
 
8348
  /* The only other cases we handle are MIN, MAX, and comparisons if the
8349
     operands are the same as REG and VAL.  */
8350
 
8351
  else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
8352
    {
8353
      if (rtx_equal_p (XEXP (x, 0), val))
8354
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;
8355
 
8356
      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
8357
        {
8358
          if (COMPARISON_P (x))
8359
            {
8360
              if (comparison_dominates_p (cond, code))
8361
                return const_true_rtx;
8362
 
8363
              code = reversed_comparison_code (x, NULL);
8364
              if (code != UNKNOWN
8365
                  && comparison_dominates_p (cond, code))
8366
                return const0_rtx;
8367
              else
8368
                return x;
8369
            }
8370
          else if (code == SMAX || code == SMIN
8371
                   || code == UMIN || code == UMAX)
8372
            {
8373
              int unsignedp = (code == UMIN || code == UMAX);
8374
 
8375
              /* Do not reverse the condition when it is NE or EQ.
8376
                 This is because we cannot conclude anything about
8377
                 the value of 'SMAX (x, y)' when x is not equal to y,
8378
                 but we can when x equals y.  */
8379
              if ((code == SMAX || code == UMAX)
8380
                  && ! (cond == EQ || cond == NE))
8381
                cond = reverse_condition (cond);
8382
 
8383
              switch (cond)
8384
                {
8385
                case GE:   case GT:
8386
                  return unsignedp ? x : XEXP (x, 1);
8387
                case LE:   case LT:
8388
                  return unsignedp ? x : XEXP (x, 0);
8389
                case GEU:  case GTU:
8390
                  return unsignedp ? XEXP (x, 1) : x;
8391
                case LEU:  case LTU:
8392
                  return unsignedp ? XEXP (x, 0) : x;
8393
                default:
8394
                  break;
8395
                }
8396
            }
8397
        }
8398
    }
8399
  else if (code == SUBREG)
8400
    {
8401
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
8402
      rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
8403
 
8404
      if (SUBREG_REG (x) != r)
8405
        {
8406
          /* We must simplify subreg here, before we lose track of the
8407
             original inner_mode.  */
8408
          new_rtx = simplify_subreg (GET_MODE (x), r,
8409
                                 inner_mode, SUBREG_BYTE (x));
8410
          if (new_rtx)
8411
            return new_rtx;
8412
          else
8413
            SUBST (SUBREG_REG (x), r);
8414
        }
8415
 
8416
      return x;
8417
    }
8418
  /* We don't have to handle SIGN_EXTEND here, because even in the
8419
     case of replacing something with a modeless CONST_INT, a
8420
     CONST_INT is already (supposed to be) a valid sign extension for
8421
     its narrower mode, which implies it's already properly
8422
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
8423
     story is different.  */
8424
  else if (code == ZERO_EXTEND)
8425
    {
8426
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
8427
      rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
8428
 
8429
      if (XEXP (x, 0) != r)
8430
        {
8431
          /* We must simplify the zero_extend here, before we lose
8432
             track of the original inner_mode.  */
8433
          new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
8434
                                          r, inner_mode);
8435
          if (new_rtx)
8436
            return new_rtx;
8437
          else
8438
            SUBST (XEXP (x, 0), r);
8439
        }
8440
 
8441
      return x;
8442
    }
8443
 
8444
  fmt = GET_RTX_FORMAT (code);
8445
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8446
    {
8447
      if (fmt[i] == 'e')
8448
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
8449
      else if (fmt[i] == 'E')
8450
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8451
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
8452
                                                cond, reg, val));
8453
    }
8454
 
8455
  return x;
8456
}
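/* Standalone illustration (not part of combine.c) of the MIN/MAX folding
   in known_cond above: once the condition REG >= VAL is known to hold,
   signed MAX and MIN collapse to a single operand.  */
#if 0
#include <assert.h>

static int smax (int a, int b) { return a > b ? a : b; }
static int smin (int a, int b) { return a < b ? a : b; }

int
main (void)
{
  int r, v = 10;
  for (r = v; r < v + 100; r++)  /* every R here satisfies R >= V */
    {
      assert (smax (r, v) == r);
      assert (smin (r, v) == v);
    }
  return 0;
}
#endif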
8457
 
8458
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
8459
   assignment as a field assignment.  */
8460
 
8461
static int
8462
rtx_equal_for_field_assignment_p (rtx x, rtx y)
8463
{
8464
  if (x == y || rtx_equal_p (x, y))
8465
    return 1;
8466
 
8467
  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
8468
    return 0;
8469
 
8470
  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
8471
     Note that all SUBREGs of MEM are paradoxical; otherwise they
8472
     would have been rewritten.  */
8473
  if (MEM_P (x) && GET_CODE (y) == SUBREG
8474
      && MEM_P (SUBREG_REG (y))
8475
      && rtx_equal_p (SUBREG_REG (y),
8476
                      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
8477
    return 1;
8478
 
8479
  if (MEM_P (y) && GET_CODE (x) == SUBREG
8480
      && MEM_P (SUBREG_REG (x))
8481
      && rtx_equal_p (SUBREG_REG (x),
8482
                      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
8483
    return 1;
8484
 
8485
  /* We used to see if get_last_value of X and Y were the same but that's
8486
     not correct.  In one direction, we'll cause the assignment to have
8487
     the wrong destination and in the other case, we'll import a register
8488
     into this insn that might already have been dead.  So fail if none of the
8489
     above cases are true.  */
8490
  return 0;
8491
}
8492
 
8493
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
8494
   Return that assignment if so.
8495
 
8496
   We only handle the most common cases.  */
8497
 
8498
static rtx
8499
make_field_assignment (rtx x)
8500
{
8501
  rtx dest = SET_DEST (x);
8502
  rtx src = SET_SRC (x);
8503
  rtx assign;
8504
  rtx rhs, lhs;
8505
  HOST_WIDE_INT c1;
8506
  HOST_WIDE_INT pos;
8507
  unsigned HOST_WIDE_INT len;
8508
  rtx other;
8509
  enum machine_mode mode;
8510
 
8511
  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
8512
     a clear of a one-bit field.  We will have changed it to
8513
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
8514
     for a SUBREG.  */
8515
 
8516
  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
8517
      && CONST_INT_P (XEXP (XEXP (src, 0), 0))
8518
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
8519
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8520
    {
8521
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8522
                                1, 1, 1, 0);
8523
      if (assign != 0)
8524
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
8525
      return x;
8526
    }
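  /* Standalone check (not part of combine.c) of the pattern matched just
     above: rotating the constant -2 (that is, ~1) left by POS yields the
     mask ~(1 << POS), so the AND clears exactly one bit of DEST.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
rotl (uint32_t x, unsigned n)
{
  return n == 0 ? x : (x << n) | (x >> (32 - n));
}

int
main (void)
{
  unsigned pos;
  for (pos = 0; pos < 32; pos++)
    assert (rotl (~1u, pos) == ~(1u << pos));
  return 0;
}
#endif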
8527
 
8528
  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
8529
      && subreg_lowpart_p (XEXP (src, 0))
8530
      && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
8531
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
8532
      && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
8533
      && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
8534
      && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
8535
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8536
    {
8537
      assign = make_extraction (VOIDmode, dest, 0,
8538
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
8539
                                1, 1, 1, 0);
8540
      if (assign != 0)
8541
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
8542
      return x;
8543
    }
8544
 
8545
  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
8546
     one-bit field.  */
8547
  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
8548
      && XEXP (XEXP (src, 0), 0) == const1_rtx
8549
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
8550
    {
8551
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
8552
                                1, 1, 1, 0);
8553
      if (assign != 0)
8554
        return gen_rtx_SET (VOIDmode, assign, const1_rtx);
8555
      return x;
8556
    }
8557
 
8558
  /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
8559
     SRC is an AND with all bits of that field set, then we can discard
8560
     the AND.  */
8561
  if (GET_CODE (dest) == ZERO_EXTRACT
8562
      && CONST_INT_P (XEXP (dest, 1))
8563
      && GET_CODE (src) == AND
8564
      && CONST_INT_P (XEXP (src, 1)))
8565
    {
8566
      HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
8567
      unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
8568
      unsigned HOST_WIDE_INT ze_mask;
8569
 
8570
      if (width >= HOST_BITS_PER_WIDE_INT)
8571
        ze_mask = -1;
8572
      else
8573
        ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
8574
 
8575
      /* Complete overlap.  We can remove the source AND.  */
8576
      if ((and_mask & ze_mask) == ze_mask)
8577
        return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8578
 
8579
      /* Partial overlap.  We can reduce the source AND.  */
8580
      if ((and_mask & ze_mask) != and_mask)
8581
        {
8582
          mode = GET_MODE (src);
8583
          src = gen_rtx_AND (mode, XEXP (src, 0),
8584
                             gen_int_mode (and_mask & ze_mask, mode));
8585
          return gen_rtx_SET (VOIDmode, dest, src);
8586
        }
8587
    }
8588
 
8589
  /* The other case we handle is assignments into a constant-position
8590
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
8591
     a mask that has all one bits except for a group of zero bits and
8592
     OTHER is known to have zeros where C1 has ones, this is such an
8593
     assignment.  Compute the position and length from C1.  Shift OTHER
8594
     to the appropriate position, force it to the required mode, and
8595
     make the extraction.  Check for the AND in both operands.  */
8596
 
8597
  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
8598
    return x;
8599
 
8600
  rhs = expand_compound_operation (XEXP (src, 0));
8601
  lhs = expand_compound_operation (XEXP (src, 1));
8602
 
8603
  if (GET_CODE (rhs) == AND
8604
      && CONST_INT_P (XEXP (rhs, 1))
8605
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8606
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8607
  else if (GET_CODE (lhs) == AND
8608
           && CONST_INT_P (XEXP (lhs, 1))
8609
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8610
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
8611
  else
8612
    return x;
8613
 
8614
  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
8615
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
8616
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8617
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
8618
    return x;
8619
 
8620
  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
8621
  if (assign == 0)
8622
    return x;
8623
 
8624
  /* The mode to use for the source is the mode of the assignment, or of
8625
     what is inside a possible STRICT_LOW_PART.  */
8626
  mode = (GET_CODE (assign) == STRICT_LOW_PART
8627
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
8628
 
8629
  /* Shift OTHER right POS places and make it the source, restricting it
8630
     to the proper length and mode.  */
8631
 
8632
  src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
8633
                                                     GET_MODE (src),
8634
                                                     other, pos),
8635
                               dest);
8636
  src = force_to_mode (src, mode,
8637
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
8638
                       ? ~(unsigned HOST_WIDE_INT) 0
8639
                       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
8640
                       0);
8641
 
8642
  /* If SRC is masked by an AND that does not make a difference in
8643
     the value being stored, strip it.  */
8644
  if (GET_CODE (assign) == ZERO_EXTRACT
8645
      && CONST_INT_P (XEXP (assign, 1))
8646
      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8647
      && GET_CODE (src) == AND
8648
      && CONST_INT_P (XEXP (src, 1))
8649
      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
8650
          == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8651
    src = XEXP (src, 0);
8652
 
8653
  return gen_rtx_SET (VOIDmode, assign, src);
8654
}
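/* A standalone sketch (not part of combine.c) of the final case above;
   POS = 8 and LEN = 4 are arbitrary choices.  With C1 all-ones except for
   LEN bits at POS, and OTHER zero wherever C1 is one, (DEST & C1) | OTHER
   stores OTHER >> POS into that bit-field of DEST, which is what the
   generated ZERO_EXTRACT assignment does.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  unsigned pos = 8, len = 4;
  uint32_t field = ((1u << len) - 1) << pos;  /* ~C1: the hole */
  uint32_t c1 = ~field;
  uint32_t dest = 0x12345678u;
  uint32_t fld;
  for (fld = 0; fld < (1u << len); fld++)
    {
      uint32_t other = fld << pos;            /* zero where C1 is one */
      uint32_t src = (dest & c1) | other;
      /* What (set (zero_extract DEST LEN POS) (OTHER >> POS)) stores.  */
      uint32_t inserted = (dest & ~field) | (((other >> pos)
                                              & ((1u << len) - 1)) << pos);
      assert (src == inserted);
    }
  return 0;
}
#endif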
8655
 
8656
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
8657
   if so.  */
8658
 
8659
static rtx
8660
apply_distributive_law (rtx x)
8661
{
8662
  enum rtx_code code = GET_CODE (x);
8663
  enum rtx_code inner_code;
8664
  rtx lhs, rhs, other;
8665
  rtx tem;
8666
 
8667
  /* Distributivity is not true for floating point as it can change the
8668
     value.  So we don't do it unless -funsafe-math-optimizations.  */
8669
  if (FLOAT_MODE_P (GET_MODE (x))
8670
      && ! flag_unsafe_math_optimizations)
8671
    return x;
8672
 
8673
  /* The outer operation can only be one of the following:  */
8674
  if (code != IOR && code != AND && code != XOR
8675
      && code != PLUS && code != MINUS)
8676
    return x;
8677
 
8678
  lhs = XEXP (x, 0);
8679
  rhs = XEXP (x, 1);
8680
 
8681
  /* If either operand is a primitive we can't do anything, so get out
8682
     fast.  */
8683
  if (OBJECT_P (lhs) || OBJECT_P (rhs))
8684
    return x;
8685
 
8686
  lhs = expand_compound_operation (lhs);
8687
  rhs = expand_compound_operation (rhs);
8688
  inner_code = GET_CODE (lhs);
8689
  if (inner_code != GET_CODE (rhs))
8690
    return x;
8691
 
8692
  /* See if the inner and outer operations distribute.  */
8693
  switch (inner_code)
8694
    {
8695
    case LSHIFTRT:
8696
    case ASHIFTRT:
8697
    case AND:
8698
    case IOR:
8699
      /* These all distribute except over PLUS.  */
8700
      if (code == PLUS || code == MINUS)
8701
        return x;
8702
      break;
8703
 
8704
    case MULT:
8705
      if (code != PLUS && code != MINUS)
8706
        return x;
8707
      break;
8708
 
8709
    case ASHIFT:
8710
      /* This is also a multiply, so it distributes over everything.  */
8711
      break;
8712
 
8713
    case SUBREG:
8714
      /* Non-paradoxical SUBREGs distribute over all operations,
8715
         provided the inner modes and byte offsets are the same, this
8716
         is an extraction of a low-order part, we don't convert an fp
8717
         operation to int or vice versa, this is not a vector mode,
8718
         and we would not be converting a single-word operation into a
8719
         multi-word operation.  The latter test is not required, but
8720
         it prevents generating unneeded multi-word operations.  Some
8721
         of the previous tests are redundant given the latter test,
8722
         but are retained because they are required for correctness.
8723
 
8724
         We produce the result slightly differently in this case.  */
8725
 
8726
      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
8727
          || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
8728
          || ! subreg_lowpart_p (lhs)
8729
          || (GET_MODE_CLASS (GET_MODE (lhs))
8730
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
8731
          || (GET_MODE_SIZE (GET_MODE (lhs))
8732
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
8733
          || VECTOR_MODE_P (GET_MODE (lhs))
8734
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
8735
          /* Result might need to be truncated.  Don't change mode if
8736
             explicit truncation is needed.  */
8737
          || !TRULY_NOOP_TRUNCATION
8738
               (GET_MODE_BITSIZE (GET_MODE (x)),
8739
                GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
8740
        return x;
8741
 
8742
      tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8743
                                 SUBREG_REG (lhs), SUBREG_REG (rhs));
8744
      return gen_lowpart (GET_MODE (x), tem);
8745
 
8746
    default:
8747
      return x;
8748
    }
8749
 
8750
  /* Set LHS and RHS to the inner operands (A and B in the example
8751
     above) and set OTHER to the common operand (C in the example).
8752
     There is only one way to do this unless the inner operation is
8753
     commutative.  */
8754
  if (COMMUTATIVE_ARITH_P (lhs)
8755
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8756
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
8757
  else if (COMMUTATIVE_ARITH_P (lhs)
8758
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8759
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
8760
  else if (COMMUTATIVE_ARITH_P (lhs)
8761
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8762
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8763
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8764
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8765
  else
8766
    return x;
8767
 
8768
  /* Form the new inner operation, seeing if it simplifies first.  */
8769
  tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
8770
 
8771
  /* There is one exception to the general way of distributing:
8772
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
8773
  if (code == XOR && inner_code == IOR)
8774
    {
8775
      inner_code = AND;
8776
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
8777
    }
8778
 
8779
  /* We may be able to continue distributing the result, so call
8780
     ourselves recursively on the inner operation before forming the
8781
     outer operation, which we return.  */
8782
  return simplify_gen_binary (inner_code, GET_MODE (x),
8783
                              apply_distributive_law (tem), other);
8784
}
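/* Standalone check (not part of combine.c) of the one exception noted
   above: (a | c) ^ (b | c) distributes to (a ^ b) & ~c rather than to
   another IOR.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a, b, c;
  for (a = 0; a < 16; a++)
    for (b = 0; b < 16; b++)
      for (c = 0; c < 16; c++)
        assert (((a | c) ^ (b | c)) == ((a ^ b) & ~c));
  return 0;
}
#endif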
8785
 
8786
/* See if X is of the form (* (+ A B) C), and if so convert to
8787
   (+ (* A C) (* B C)) and try to simplify.
8788
 
8789
   Most of the time, this results in no change.  However, if some of
8790
   the operands are the same or inverses of each other, simplifications
8791
   will result.
8792
 
8793
   For example, (and (ior A B) (not B)) can occur as the result of
8794
   expanding a bit field assignment.  When we apply the distributive
8795
   law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8796
   which then simplifies to (and (A (not B))).
8797
 
8798
   Note that no checks happen on the validity of applying the inverse
8799
   distributive law.  This is pointless since we can do it in the
8800
   few places where this routine is called.
8801
 
8802
   N is the index of the term that is decomposed (the arithmetic operation,
8803
   i.e. (+ A B) in the first example above).  !N is the index of the term that
8804
   is distributed, i.e. of C in the first example above.  */
8805
static rtx
8806
distribute_and_simplify_rtx (rtx x, int n)
8807
{
8808
  enum machine_mode mode;
8809
  enum rtx_code outer_code, inner_code;
8810
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8811
 
8812
  /* Distributivity is not true for floating point as it can change the
8813
     value.  So we don't do it unless -funsafe-math-optimizations.  */
8814
  if (FLOAT_MODE_P (GET_MODE (x))
8815
      && ! flag_unsafe_math_optimizations)
8816
    return NULL_RTX;
8817
 
8818
  decomposed = XEXP (x, n);
8819
  if (!ARITHMETIC_P (decomposed))
8820
    return NULL_RTX;
8821
 
8822
  mode = GET_MODE (x);
8823
  outer_code = GET_CODE (x);
8824
  distributed = XEXP (x, !n);
8825
 
8826
  inner_code = GET_CODE (decomposed);
8827
  inner_op0 = XEXP (decomposed, 0);
8828
  inner_op1 = XEXP (decomposed, 1);
8829
 
8830
  /* Special case (and (xor B C) (not A)), which is equivalent to
8831
     (xor (ior A B) (ior A C))  */
8832
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8833
    {
8834
      distributed = XEXP (distributed, 0);
8835
      outer_code = IOR;
8836
    }
8837
 
8838
  if (n == 0)
8839
    {
8840
      /* Distribute the second term.  */
8841
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8842
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8843
    }
8844
  else
8845
    {
8846
      /* Distribute the first term.  */
8847
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8848
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8849
    }
8850
 
8851
  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8852
                                                     new_op0, new_op1));
8853
  if (GET_CODE (tmp) != outer_code
8854
      && rtx_cost (tmp, SET, optimize_this_for_speed_p)
8855
         < rtx_cost (x, SET, optimize_this_for_speed_p))
8856
    return tmp;
8857
 
8858
  return NULL_RTX;
8859
}
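/* Standalone check (not part of combine.c) of the special case handled
   above: (and (xor B C) (not A)) equals (xor (ior A B) (ior A C)), which
   is why the code swaps in IOR as the operation to distribute.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a, b, c;
  for (a = 0; a < 16; a++)
    for (b = 0; b < 16; b++)
      for (c = 0; c < 16; c++)
        assert (((b ^ c) & ~a) == ((a | b) ^ (a | c)));
  return 0;
}
#endif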
8860
 
8861
/* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
8862
   in MODE.  Return an equivalent form, if different from (and VAROP
8863
   (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */
8864
 
8865
static rtx
8866
simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
8867
                          unsigned HOST_WIDE_INT constop)
8868
{
8869
  unsigned HOST_WIDE_INT nonzero;
8870
  unsigned HOST_WIDE_INT orig_constop;
8871
  rtx orig_varop;
8872
  int i;
8873
 
8874
  orig_varop = varop;
8875
  orig_constop = constop;
8876
  if (GET_CODE (varop) == CLOBBER)
8877
    return NULL_RTX;
8878
 
8879
  /* Simplify VAROP knowing that we will be only looking at some of the
8880
     bits in it.
8881
 
8882
     Note by passing in CONSTOP, we guarantee that the bits not set in
8883
     CONSTOP are not significant and will never be examined.  We must
8884
     ensure that is the case by explicitly masking out those bits
8885
     before returning.  */
8886
  varop = force_to_mode (varop, mode, constop, 0);
8887
 
8888
  /* If VAROP is a CLOBBER, we will fail so return it.  */
8889
  if (GET_CODE (varop) == CLOBBER)
8890
    return varop;
8891
 
8892
  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8893
     to VAROP and return the new constant.  */
8894
  if (CONST_INT_P (varop))
8895
    return gen_int_mode (INTVAL (varop) & constop, mode);
8896
 
8897
  /* See what bits may be nonzero in VAROP.  Unlike the general case of
8898
     a call to nonzero_bits, here we don't care about bits outside
8899
     MODE.  */
8900
 
8901
  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
8902
 
8903
  /* Turn off all bits in the constant that are known to already be zero.
8904
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
8905
     which is tested below.  */
8906
 
8907
  constop &= nonzero;
8908
 
8909
  /* If we don't have any bits left, return zero.  */
8910
  if (constop == 0)
8911
    return const0_rtx;
8912
 
8913
  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
8914
     a power of two, we can replace this with an ASHIFT.  */
8915
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8916
      && (i = exact_log2 (constop)) >= 0)
8917
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
8918
 
8919
  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8920
     or XOR, then try to apply the distributive law.  This may eliminate
8921
     operations if either branch can be simplified because of the AND.
8922
     It may also make some cases more complex, but those cases probably
8923
     won't match a pattern either with or without this.  */
8924
 
8925
  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8926
    return
8927
      gen_lowpart
8928
        (mode,
8929
         apply_distributive_law
8930
         (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8931
                               simplify_and_const_int (NULL_RTX,
8932
                                                       GET_MODE (varop),
8933
                                                       XEXP (varop, 0),
8934
                                                       constop),
8935
                               simplify_and_const_int (NULL_RTX,
8936
                                                       GET_MODE (varop),
8937
                                                       XEXP (varop, 1),
8938
                                                       constop))));
8939
 
8940
  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8941
     the AND and see if one of the operands simplifies to zero.  If so, we
8942
     may eliminate it.  */
8943
 
8944
  if (GET_CODE (varop) == PLUS
8945
      && exact_log2 (constop + 1) >= 0)
8946
    {
8947
      rtx o0, o1;
8948
 
8949
      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8950
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8951
      if (o0 == const0_rtx)
8952
        return o1;
8953
      if (o1 == const0_rtx)
8954
        return o0;
8955
    }
8956
 
8957
  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
8958
  varop = gen_lowpart (mode, varop);
8959
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
8960
    return NULL_RTX;
8961
 
8962
  /* If we are only masking insignificant bits, return VAROP.  */
8963
  if (constop == nonzero)
8964
    return varop;
8965
 
8966
  if (varop == orig_varop && constop == orig_constop)
8967
    return NULL_RTX;
8968
 
8969
  /* Otherwise, return an AND.  */
8970
  return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
8971
}
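/* Two standalone checks (not part of combine.c) of rewrites performed
   above: when B is known to be 0 or 1, (neg B) masked with a power of
   two is B shifted left; and when the constant is a low-bit mask
   2**N - 1, the AND may be distributed into each addend of a PLUS
   provided the result is re-masked.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t b, i, x, y, m = 0xff;  /* m + 1 is a power of two */
  for (b = 0; b <= 1; b++)
    for (i = 0; i < 32; i++)
      assert (((0u - b) & (1u << i)) == (b << i));
  for (x = 0; x < 512; x++)
    for (y = 0; y < 512; y++)
      assert (((x + y) & m) == (((x & m) + (y & m)) & m));
  return 0;
}
#endif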
8972
 
8973
 
8974
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8975
   in MODE.
8976
 
8977
   Return an equivalent form, if different from X.  Otherwise, return X.  If
8978
   X is zero, we are to always construct the equivalent form.  */
8979
 
8980
static rtx
8981
simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8982
                        unsigned HOST_WIDE_INT constop)
8983
{
8984
  rtx tem = simplify_and_const_int_1 (mode, varop, constop);
8985
  if (tem)
8986
    return tem;
8987
 
8988
  if (!x)
8989
    x = simplify_gen_binary (AND, GET_MODE (varop), varop,
8990
                             gen_int_mode (constop, mode));
8991
  if (GET_MODE (x) != mode)
8992
    x = gen_lowpart (mode, x);
8993
  return x;
8994
}
8995
 
8996
/* Given a REG, X, compute which bits in X can be nonzero.
8997
   We don't care about bits outside of those defined in MODE.
8998
 
8999
   For most X this is simply GET_MODE_MASK (MODE), but if X is
9000
   a shift, AND, or zero_extract, we can do better.  */
9001
 
9002
static rtx
9003
reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
9004
                              const_rtx known_x ATTRIBUTE_UNUSED,
9005
                              enum machine_mode known_mode ATTRIBUTE_UNUSED,
9006
                              unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
9007
                              unsigned HOST_WIDE_INT *nonzero)
9008
{
9009
  rtx tem;
9010
  reg_stat_type *rsp;
9011
 
9012
  /* If X is a register whose nonzero bits value is current, use it.
9013
     Otherwise, if X is a register whose value we can find, use that
9014
     value.  Otherwise, use the previously-computed global nonzero bits
9015
     for this register.  */
9016
 
9017
  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9018
  if (rsp->last_set_value != 0
9019
      && (rsp->last_set_mode == mode
9020
          || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
9021
              && GET_MODE_CLASS (mode) == MODE_INT))
9022
      && ((rsp->last_set_label >= label_tick_ebb_start
9023
           && rsp->last_set_label < label_tick)
9024
          || (rsp->last_set_label == label_tick
9025
              && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9026
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9027
              && REG_N_SETS (REGNO (x)) == 1
9028
              && !REGNO_REG_SET_P
9029
                  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
9030
    {
9031
      *nonzero &= rsp->last_set_nonzero_bits;
9032
      return NULL;
9033
    }
9034
 
9035
  tem = get_last_value (x);
9036
 
9037
  if (tem)
9038
    {
9039
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
9040
      /* If X is narrower than MODE and TEM is a non-negative
9041
         constant that would appear negative in the mode of X,
9042
         sign-extend it for use in reg_nonzero_bits because some
9043
         machines (maybe most) will actually do the sign-extension
9044
         and this is the conservative approach.
9045
 
9046
         ??? For 2.5, try to tighten up the MD files in this regard
9047
         instead of this kludge.  */
9048
 
9049
      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
9050
          && CONST_INT_P (tem)
9051
          && INTVAL (tem) > 0
9052
          && 0 != (INTVAL (tem)
9053
                   & ((HOST_WIDE_INT) 1
9054
                      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9055
        tem = GEN_INT (INTVAL (tem)
9056
                       | ((HOST_WIDE_INT) (-1)
9057
                          << GET_MODE_BITSIZE (GET_MODE (x))));
9058
#endif
9059
      return tem;
9060
    }
9061
  else if (nonzero_sign_valid && rsp->nonzero_bits)
9062
    {
9063
      unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
9064
 
9065
      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
9066
        /* We don't know anything about the upper bits.  */
9067
        mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
9068
      *nonzero &= mask;
9069
    }
9070
 
9071
  return NULL;
9072
}
9073
 
9074
/* Return the number of bits at the high-order end of X that are known to
9075
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
9076
   VOIDmode, X will be used in its own mode.  The returned value will always
9077
   be between 1 and the number of bits in MODE.  */
9078
 
9079
static rtx
9080
reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
9081
                                     const_rtx known_x ATTRIBUTE_UNUSED,
9082
                                     enum machine_mode known_mode
9083
                                     ATTRIBUTE_UNUSED,
9084
                                     unsigned int known_ret ATTRIBUTE_UNUSED,
9085
                                     unsigned int *result)
9086
{
9087
  rtx tem;
9088
  reg_stat_type *rsp;
9089
 
9090
  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9091
  if (rsp->last_set_value != 0
9092
      && rsp->last_set_mode == mode
9093
      && ((rsp->last_set_label >= label_tick_ebb_start
9094
           && rsp->last_set_label < label_tick)
9095
          || (rsp->last_set_label == label_tick
9096
              && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9097
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9098
              && REG_N_SETS (REGNO (x)) == 1
9099
              && !REGNO_REG_SET_P
9100
                  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
9101
    {
9102
      *result = rsp->last_set_sign_bit_copies;
9103
      return NULL;
9104
    }
9105
 
9106
  tem = get_last_value (x);
9107
  if (tem != 0)
9108
    return tem;
9109
 
9110
  if (nonzero_sign_valid && rsp->sign_bit_copies != 0
9111
      && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
9112
    *result = rsp->sign_bit_copies;
9113
 
9114
  return NULL;
9115
}
9116
 
9117
/* Return the number of "extended" bits there are in X, when interpreted
9118
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
9119
   unsigned quantities, this is the number of high-order zero bits.
9120
   For signed quantities, this is the number of copies of the sign bit
9121
   minus 1.  In both cases, this function returns the number of "spare"
9122
   bits.  For example, if two quantities for which this function returns
9123
   at least 1 are added, the addition is known not to overflow.
9124
 
9125
   This function will always return 0 unless called during combine, which
9126
   implies that it must be called from a define_split.  */
9127
 
9128
unsigned int
9129
extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
9130
{
9131
  if (nonzero_sign_valid == 0)
9132
    return 0;
9133
 
9134
  return (unsignedp
9135
          ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9136
             ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
9137
                               - floor_log2 (nonzero_bits (x, mode)))
9138
             : 0)
9139
          : num_sign_bit_copies (x, mode) - 1);
9140
}
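/* Standalone illustration (not part of combine.c) of the no-overflow
   guarantee described above, modelling a 16-bit mode: values with bit 15
   clear have at least one "spare" bit, so their sum never wraps.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t x, y;
  for (x = 0; x < 0x8000; x += 997)
    for (y = 0; y < 0x8000; y += 991)
      assert (x + y <= 0xffff);  /* fits in the 16-bit mode */
  return 0;
}
#endif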
9141
 
9142
/* This function is called from `simplify_shift_const' to merge two
9143
   outer operations.  Specifically, we have already found that we need
9144
   to perform operation *POP0 with constant *PCONST0 at the outermost
9145
   position.  We would now like to also perform OP1 with constant CONST1
9146
   (with *POP0 being done last).
9147
 
9148
   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
9149
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
9150
   complement the innermost operand, otherwise it is unchanged.
9151
 
9152
   MODE is the mode in which the operation will be done.  No bits outside
9153
   the width of this mode matter.  It is assumed that the width of this mode
9154
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.
9155
 
9156
   If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only NEG, PLUS,
9157
   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
9158
   result is simply *PCONST0.
9159
 
9160
   If the resulting operation cannot be expressed as one operation, we
9161
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
9162
 
9163
static int
9164
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
9165
{
9166
  enum rtx_code op0 = *pop0;
9167
  HOST_WIDE_INT const0 = *pconst0;
9168
 
9169
  const0 &= GET_MODE_MASK (mode);
9170
  const1 &= GET_MODE_MASK (mode);
9171
 
9172
  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
9173
  if (op0 == AND)
9174
    const1 &= const0;
9175
 
9176
  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
9177
     if OP0 is SET.  */
9178
 
9179
  if (op1 == UNKNOWN || op0 == SET)
9180
    return 1;
9181
 
9182
  else if (op0 == UNKNOWN)
9183
    op0 = op1, const0 = const1;
9184
 
9185
  else if (op0 == op1)
9186
    {
9187
      switch (op0)
9188
        {
9189
        case AND:
9190
          const0 &= const1;
9191
          break;
9192
        case IOR:
9193
          const0 |= const1;
9194
          break;
9195
        case XOR:
9196
          const0 ^= const1;
9197
          break;
9198
        case PLUS:
9199
          const0 += const1;
9200
          break;
9201
        case NEG:
9202
          op0 = UNKNOWN;
9203
          break;
9204
        default:
9205
          break;
9206
        }
9207
    }
9208
 
9209
  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
9210
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9211
    return 0;
9212
 
9213
  /* If the two constants aren't the same, we can't do anything.  The
9214
     remaining six cases can all be done.  */
9215
  else if (const0 != const1)
9216
    return 0;
9217
 
9218
  else
9219
    switch (op0)
9220
      {
9221
      case IOR:
9222
        if (op1 == AND)
9223
          /* (a & b) | b == b */
9224
          op0 = SET;
9225
        else /* op1 == XOR */
9226
          /* (a ^ b) | b == a | b */
9227
          {;}
9228
        break;
9229
 
9230
      case XOR:
9231
        if (op1 == AND)
9232
          /* (a & b) ^ b == (~a) & b */
9233
          op0 = AND, *pcomp_p = 1;
9234
        else /* op1 == IOR */
9235
          /* (a | b) ^ b == a & ~b */
9236
          op0 = AND, const0 = ~const0;
9237
        break;
9238
 
9239
      case AND:
9240
        if (op1 == IOR)
9241
          /* (a | b) & b == b */
9242
          op0 = SET;
9243
        else /* op1 == XOR */
9244
          /* (a ^ b) & b == (~a) & b */
9245
          *pcomp_p = 1;
9246
        break;
9247
      default:
9248
        break;
9249
      }
9250
 
9251
  /* Check for NO-OP cases.  */
9252
  const0 &= GET_MODE_MASK (mode);
9253
  if (const0 == 0
9254
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
9255
    op0 = UNKNOWN;
9256
  else if (const0 == 0 && op0 == AND)
9257
    op0 = SET;
9258
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9259
           && op0 == AND)
9260
    op0 = UNKNOWN;
9261
 
9262
  *pop0 = op0;
9263
 
9264
  /* ??? Slightly redundant with the above mask, but not entirely.
9265
     Moving this above means we'd have to sign-extend the mode mask
9266
     for the final test.  */
9267
  if (op0 != UNKNOWN && op0 != NEG)
9268
    *pconst0 = trunc_int_for_mode (const0, mode);
9269
 
9270
  return 1;
9271
}
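/* Standalone checks (not part of combine.c) of the six merge identities
   used above when the two constants are equal.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t a, b;
  for (a = 0; a < 16; a++)
    for (b = 0; b < 16; b++)
      {
        assert (((a & b) | b) == b);        /* IOR after AND -> SET       */
        assert (((a ^ b) | b) == (a | b));  /* IOR after XOR -> IOR       */
        assert (((a & b) ^ b) == (~a & b)); /* XOR after AND -> AND, comp */
        assert (((a | b) ^ b) == (a & ~b)); /* XOR after IOR -> AND ~b    */
        assert (((a | b) & b) == b);        /* AND after IOR -> SET       */
        assert (((a ^ b) & b) == (~a & b)); /* AND after XOR -> AND, comp */
      }
  return 0;
}
#endif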
9272
 
9273
/* A helper to simplify_shift_const_1 to determine the mode we can perform
9274
   the shift in.  The original shift operation CODE is performed on OP in
9275
   ORIG_MODE.  Return the wider mode MODE if we can perform the operation
9276
   in that mode.  Return ORIG_MODE otherwise.  We can also assume that the
9277
   result of the shift is subject to operation OUTER_CODE with operand
9278
   OUTER_CONST.  */
9279
 
9280
static enum machine_mode
9281
try_widen_shift_mode (enum rtx_code code, rtx op, int count,
9282
                      enum machine_mode orig_mode, enum machine_mode mode,
9283
                      enum rtx_code outer_code, HOST_WIDE_INT outer_const)
9284
{
9285
  if (orig_mode == mode)
9286
    return mode;
9287
  gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode));
9288
 
9289
  /* In general we can't perform the shift in a wider mode for right shifts
     and rotates.  */
9290
  switch (code)
9291
    {
9292
    case ASHIFTRT:
9293
      /* We can still widen if the bits brought in from the left are identical
9294
         to the sign bit of ORIG_MODE.  */
9295
      if (num_sign_bit_copies (op, mode)
9296
          > (unsigned) (GET_MODE_BITSIZE (mode)
9297
                        - GET_MODE_BITSIZE (orig_mode)))
9298
        return mode;
9299
      return orig_mode;
9300
 
9301
    case LSHIFTRT:
9302
      /* Similarly here but with zero bits.  */
9303
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9304
          && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
9305
        return mode;
9306
 
9307
      /* We can also widen if the bits brought in will be masked off.  This
9308
         operation is performed in ORIG_MODE.  */
9309
      if (outer_code == AND)
9310
        {
9311
          int care_bits = low_bitmask_len (orig_mode, outer_const);
9312
 
9313
          if (care_bits >= 0
9314
              && GET_MODE_BITSIZE (orig_mode) - care_bits >= count)
9315
            return mode;
9316
        }
9317
      /* fall through */
9318
 
9319
    case ROTATE:
9320
      return orig_mode;
9321
 
9322
    case ROTATERT:
9323
      gcc_unreachable ();
9324
 
9325
    default:
9326
      return mode;
9327
    }
9328
}
9329
 
9330
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
9331
   The result of the shift is RESULT_MODE.  Return NULL_RTX if we cannot
9332
   simplify it.  Otherwise, return a simplified value.
9333
 
9334
   The shift is normally computed in the widest mode we find in VAROP, as
9335
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
9336
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
9337
 
9338
static rtx
9339
simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
9340
                        rtx varop, int orig_count)
9341
{
9342
  enum rtx_code orig_code = code;
9343
  rtx orig_varop = varop;
9344
  int count;
9345
  enum machine_mode mode = result_mode;
9346
  enum machine_mode shift_mode, tmode;
9347
  unsigned int mode_words
9348
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9349
  /* We form (outer_op (code varop count) (outer_const)).  */
9350
  enum rtx_code outer_op = UNKNOWN;
9351
  HOST_WIDE_INT outer_const = 0;
9352
  int complement_p = 0;
9353
  rtx new_rtx, x;
9354
 
9355
  /* Make sure to truncate the "natural" shift on the way in.  We don't
9356
     want to do this inside the loop as it makes it more difficult to
9357
     combine shifts.  */
9358
  if (SHIFT_COUNT_TRUNCATED)
9359
    orig_count &= GET_MODE_BITSIZE (mode) - 1;
9360
 
9361
  /* If we were given an invalid count, don't do anything except exactly
9362
     what was requested.  */
9363
 
9364
  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
9365
    return NULL_RTX;
9366
 
9367
  count = orig_count;
9368
 
9369
  /* Unless one of the branches of the `if' in this loop does a `continue',
9370
     we will `break' the loop after the `if'.  */
9371
 
9372
  while (count != 0)
9373
    {
9374
      /* If we have an operand of (clobber (const_int 0)), fail.  */
9375
      if (GET_CODE (varop) == CLOBBER)
9376
        return NULL_RTX;
9377
 
9378
      /* Convert ROTATERT to ROTATE.  */
9379
      if (code == ROTATERT)
9380
        {
9381
          unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9382
          code = ROTATE;
9383
          if (VECTOR_MODE_P (result_mode))
9384
            count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9385
          else
9386
            count = bitsize - count;
9387
        }
9388
 
9389
      shift_mode = try_widen_shift_mode (code, varop, count, result_mode,
9390
                                         mode, outer_op, outer_const);
9391
 
9392
      /* Handle cases where the count is greater than the size of the mode
9393
         minus 1.  For ASHIFT, use the size minus one as the count (this can
9394
         occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
9395
         take the count modulo the size.  For other shifts, the result is
9396
         zero.
9397
 
9398
         Since these shifts are being produced by the compiler by combining
9399
         multiple operations, each of which is defined, we know what the
9400
         result is supposed to be.  */
9401
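      /* For example, in SImode (ashiftrt X 40) becomes (ashiftrt X 31),
         (rotate X 40) becomes (rotate X 8), and (lshiftrt X 40) is
         simply zero.  */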
 
9402
      if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
9403
        {
9404
          if (code == ASHIFTRT)
9405
            count = GET_MODE_BITSIZE (shift_mode) - 1;
9406
          else if (code == ROTATE || code == ROTATERT)
9407
            count %= GET_MODE_BITSIZE (shift_mode);
9408
          else
9409
            {
9410
              /* We can't simply return zero because there may be an
9411
                 outer op.  */
9412
              varop = const0_rtx;
9413
              count = 0;
9414
              break;
9415
            }
9416
        }
9417
 
9418
      /* If we discovered we had to complement VAROP, leave.  Making a NOT
9419
         here would cause an infinite loop.  */
9420
      if (complement_p)
9421
        break;
9422
 
9423
      /* An arithmetic right shift of a quantity known to be -1 or 0
9424
         is a no-op.  */
9425
      if (code == ASHIFTRT
9426
          && (num_sign_bit_copies (varop, shift_mode)
9427
              == GET_MODE_BITSIZE (shift_mode)))
9428
        {
9429
          count = 0;
9430
          break;
9431
        }
9432
 
9433
      /* If we are doing an arithmetic right shift and discarding all but
9434
         the sign bit copies, this is equivalent to doing a shift by the
9435
         bitsize minus one.  Convert it into that shift because it will often
9436
         allow other simplifications.  */
9437
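      /* E.g. if X has 12 sign-bit copies in SImode, (ashiftrt X 25)
         already consists only of sign-bit copies, so it equals
         (ashiftrt X 31).  */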
 
9438
      if (code == ASHIFTRT
9439
          && (count + num_sign_bit_copies (varop, shift_mode)
9440
              >= GET_MODE_BITSIZE (shift_mode)))
9441
        count = GET_MODE_BITSIZE (shift_mode) - 1;
9442
 
9443
      /* We simplify the tests below and elsewhere by converting
9444
         ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9445
         `make_compound_operation' will convert it to an ASHIFTRT for
9446
         those machines (such as VAX) that don't have an LSHIFTRT.  */
9447
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9448
          && code == ASHIFTRT
9449
          && ((nonzero_bits (varop, shift_mode)
9450
               & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9451
              == 0))
9452
        code = LSHIFTRT;
9453
 
9454
      if (((code == LSHIFTRT
9455
            && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9456
            && !(nonzero_bits (varop, shift_mode) >> count))
9457
           || (code == ASHIFT
9458
               && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9459
               && !((nonzero_bits (varop, shift_mode) << count)
9460
                    & GET_MODE_MASK (shift_mode))))
9461
          && !side_effects_p (varop))
9462
        varop = const0_rtx;
9463
 
9464
      switch (GET_CODE (varop))
9465
        {
9466
        case SIGN_EXTEND:
9467
        case ZERO_EXTEND:
9468
        case SIGN_EXTRACT:
9469
        case ZERO_EXTRACT:
9470
          new_rtx = expand_compound_operation (varop);
9471
          if (new_rtx != varop)
9472
            {
9473
              varop = new_rtx;
9474
              continue;
9475
            }
9476
          break;
9477
 
9478
        case MEM:
9479
          /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9480
             minus the width of a smaller mode, we can do this with a
9481
             SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
9482
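          /* For instance, (lshiftrt:SI (mem:SI A) 24) uses only the
             high byte of the SImode word, so it can be rewritten as
             (zero_extend:SI (mem:QI A')) with A' addressing that byte.  */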
          if ((code == ASHIFTRT || code == LSHIFTRT)
9483
              && ! mode_dependent_address_p (XEXP (varop, 0))
9484
              && ! MEM_VOLATILE_P (varop)
9485
              && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9486
                                         MODE_INT, 1)) != BLKmode)
9487
            {
9488
              new_rtx = adjust_address_nv (varop, tmode,
9489
                                       BYTES_BIG_ENDIAN ? 0
9490
                                       : count / BITS_PER_UNIT);
9491
 
9492
              varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
9493
                                     : ZERO_EXTEND, mode, new_rtx);
9494
              count = 0;
9495
              continue;
9496
            }
9497
          break;
9498
 
9499
        case SUBREG:
9500
          /* If VAROP is a SUBREG, strip it as long as the inner operand has
9501
             the same number of words as what we've seen so far.  Then store
9502
             the widest mode in MODE.  */
9503
          if (subreg_lowpart_p (varop)
9504
              && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9505
                  > GET_MODE_SIZE (GET_MODE (varop)))
9506
              && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9507
                                  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9508
                 == mode_words)
9509
            {
9510
              varop = SUBREG_REG (varop);
9511
              if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9512
                mode = GET_MODE (varop);
9513
              continue;
9514
            }
9515
          break;
9516
 
9517
        case MULT:
9518
          /* Some machines use MULT instead of ASHIFT because MULT
9519
             is cheaper.  But it is still better on those machines to
9520
             merge two shifts into one.  */
9521
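          /* E.g. (ashift (mult X 8) 2) first becomes
             (ashift (ashift X 3) 2), and the nested shifts then merge
             into (ashift X 5).  */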
          if (CONST_INT_P (XEXP (varop, 1))
9522
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9523
            {
9524
              varop
9525
                = simplify_gen_binary (ASHIFT, GET_MODE (varop),
9526
                                       XEXP (varop, 0),
9527
                                       GEN_INT (exact_log2 (
9528
                                                INTVAL (XEXP (varop, 1)))));
9529
              continue;
9530
            }
9531
          break;
9532
 
9533
        case UDIV:
9534
          /* Similar, for when divides are cheaper.  */
9535
          if (CONST_INT_P (XEXP (varop, 1))
9536
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9537
            {
9538
              varop
9539
                = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
9540
                                       XEXP (varop, 0),
9541
                                       GEN_INT (exact_log2 (
9542
                                                INTVAL (XEXP (varop, 1)))));
9543
              continue;
9544
            }
9545
          break;
9546
 
9547
        case ASHIFTRT:
9548
          /* If we are extracting just the sign bit of an arithmetic
9549
             right shift, that shift is not needed.  However, the sign
9550
             bit of a wider mode may be different from what would be
9551
             interpreted as the sign bit in a narrower mode, so, if
9552
             the result is narrower, don't discard the shift.  */
9553
          if (code == LSHIFTRT
9554
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9555
              && (GET_MODE_BITSIZE (result_mode)
9556
                  >= GET_MODE_BITSIZE (GET_MODE (varop))))
9557
            {
9558
              varop = XEXP (varop, 0);
9559
              continue;
9560
            }
9561
 
9562
          /* ... fall through ...  */
9563
 
9564
        case LSHIFTRT:
9565
        case ASHIFT:
9566
        case ROTATE:
9567
          /* Here we have two nested shifts.  The result is usually the
9568
             AND of a new shift with a mask.  We compute the result below.  */
9569
          if (CONST_INT_P (XEXP (varop, 1))
9570
              && INTVAL (XEXP (varop, 1)) >= 0
9571
              && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
9572
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9573
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9574
              && !VECTOR_MODE_P (result_mode))
9575
            {
9576
              enum rtx_code first_code = GET_CODE (varop);
9577
              unsigned int first_count = INTVAL (XEXP (varop, 1));
9578
              unsigned HOST_WIDE_INT mask;
9579
              rtx mask_rtx;
9580
 
9581
              /* We have one common special case.  We can't do any merging if
9582
                 the inner code is an ASHIFTRT of a smaller mode.  However, if
9583
                 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9584
                 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9585
                 we can convert it to
9586
                 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9587
                 This simplifies certain SIGN_EXTEND operations.  */
9588
              if (code == ASHIFT && first_code == ASHIFTRT
9589
                  && count == (GET_MODE_BITSIZE (result_mode)
9590
                               - GET_MODE_BITSIZE (GET_MODE (varop))))
9591
                {
9592
                  /* C3 has the low-order C1 bits zero.  */
9593
 
9594
                  mask = (GET_MODE_MASK (mode)
9595
                          & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
9596
 
9597
                  varop = simplify_and_const_int (NULL_RTX, result_mode,
9598
                                                  XEXP (varop, 0), mask);
9599
                  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9600
                                                varop, count);
9601
                  count = first_count;
9602
                  code = ASHIFTRT;
9603
                  continue;
9604
                }
9605
 
9606
              /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9607
                 than C1 high-order bits equal to the sign bit, we can convert
9608
                 this to either an ASHIFT or an ASHIFTRT depending on the
9609
                 two counts.
9610
 
9611
                 We cannot do this if VAROP's mode is not SHIFT_MODE.  */
9612
 
9613
              if (code == ASHIFTRT && first_code == ASHIFT
9614
                  && GET_MODE (varop) == shift_mode
9615
                  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9616
                      > first_count))
9617
                {
9618
                  varop = XEXP (varop, 0);
9619
                  count -= first_count;
9620
                  if (count < 0)
9621
                    {
9622
                      count = -count;
9623
                      code = ASHIFT;
9624
                    }
9625
 
9626
                  continue;
9627
                }
9628
 
9629
              /* There are some cases we can't do.  If CODE is ASHIFTRT,
9630
                 we can only do this if FIRST_CODE is also ASHIFTRT.
9631
 
9632
                 We can't do the case when CODE is ROTATE and FIRST_CODE is
9633
                 ASHIFTRT.
9634
 
9635
                 If the mode of this shift is not the mode of the outer shift,
9636
                 we can't do this if either shift is a right shift or ROTATE.
9637
 
9638
                 Finally, we can't do any of these if the mode is too wide
9639
                 unless the codes are the same.
9640
 
9641
                 Handle the case where the shift codes are the same
9642
                 first.  */
9643
 
9644
              if (code == first_code)
9645
                {
9646
                  if (GET_MODE (varop) != result_mode
9647
                      && (code == ASHIFTRT || code == LSHIFTRT
9648
                          || code == ROTATE))
9649
                    break;
9650
 
9651
                  count += first_count;
9652
                  varop = XEXP (varop, 0);
9653
                  continue;
9654
                }
9655
 
9656
              if (code == ASHIFTRT
9657
                  || (code == ROTATE && first_code == ASHIFTRT)
9658
                  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
9659
                  || (GET_MODE (varop) != result_mode
9660
                      && (first_code == ASHIFTRT || first_code == LSHIFTRT
9661
                          || first_code == ROTATE
9662
                          || code == ROTATE)))
9663
                break;
9664
 
9665
              /* To compute the mask to apply after the shift, shift the
9666
                 nonzero bits of the inner shift the same way the
9667
                 outer shift will.  */
9668
 
9669
              mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9670
 
9671
              mask_rtx
9672
                = simplify_const_binary_operation (code, result_mode, mask_rtx,
9673
                                                   GEN_INT (count));
9674
 
9675
              /* Give up if we can't compute an outer operation to use.  */
9676
              if (mask_rtx == 0
9677
                  || !CONST_INT_P (mask_rtx)
9678
                  || ! merge_outer_ops (&outer_op, &outer_const, AND,
9679
                                        INTVAL (mask_rtx),
9680
                                        result_mode, &complement_p))
9681
                break;
9682
 
9683
              /* If the shifts are in the same direction, we add the
9684
                 counts.  Otherwise, we subtract them.  */
9685
              if ((code == ASHIFTRT || code == LSHIFTRT)
9686
                  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9687
                count += first_count;
9688
              else
9689
                count -= first_count;
9690
 
9691
              /* If COUNT is positive, the new shift is usually CODE,
9692
                 except for the two exceptions below, in which case it is
9693
                 FIRST_CODE.  If the count is negative, FIRST_CODE should
9694
                 always be used.  */
9695
              if (count > 0
9696
                  && ((first_code == ROTATE && code == ASHIFT)
9697
                      || (first_code == ASHIFTRT && code == LSHIFTRT)))
9698
                code = first_code;
9699
              else if (count < 0)
9700
                code = first_code, count = -count;
9701
 
9702
              varop = XEXP (varop, 0);
9703
              continue;
9704
            }
9705
 
9706
          /* If we have (A << B << C) for any shift, we can convert this to
9707
             (A << C << B).  This wins if A is a constant.  Only try this if
9708
             B is not a constant.  */
9709
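          /* E.g. (ashift (ashift 2 B) 3) becomes (ashift 16 B), since
             (ashift 2 3) folds to the constant 16.  */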
 
9710
          else if (GET_CODE (varop) == code
9711
                   && CONST_INT_P (XEXP (varop, 0))
9712
                   && !CONST_INT_P (XEXP (varop, 1)))
9713
            {
9714
              rtx new_rtx = simplify_const_binary_operation (code, mode,
9715
                                                         XEXP (varop, 0),
9716
                                                         GEN_INT (count));
9717
              varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
9718
              count = 0;
9719
              continue;
9720
            }
9721
          break;
9722
 
9723
        case NOT:
9724
          if (VECTOR_MODE_P (mode))
9725
            break;
9726
 
9727
          /* Make this fit the case below.  */
9728
          varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9729
                               GEN_INT (GET_MODE_MASK (mode)));
9730
          continue;
9731
 
9732
        case IOR:
9733
        case AND:
9734
        case XOR:
9735
          /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9736
             with C the size of VAROP - 1 and the shift is logical if
9737
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9738
             we have an (le X 0) operation.   If we have an arithmetic shift
9739
             and STORE_FLAG_VALUE is 1 or we have a logical shift with
9740
             STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */
9741
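          /* To see why: (ior (plus X -1) X) has its sign bit set exactly
             when X is zero or negative, so shifting that sign bit down
             to bit 0 computes (le X 0).  */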
 
9742
          if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9743
              && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9744
              && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9745
              && (code == LSHIFTRT || code == ASHIFTRT)
9746
              && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9747
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9748
            {
9749
              count = 0;
9750
              varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9751
                                  const0_rtx);
9752
 
9753
              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9754
                varop = gen_rtx_NEG (GET_MODE (varop), varop);
9755
 
9756
              continue;
9757
            }
9758
 
9759
          /* If we have (shift (logical)), move the logical to the outside
9760
             to allow it to possibly combine with another logical and the
9761
             shift to combine with another shift.  This also canonicalizes to
9762
             what a ZERO_EXTRACT looks like.  Also, some machines have
9763
             (and (shift)) insns.  */
9764
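          /* E.g. (lshiftrt (and X 0xff00) 8) becomes the outer operation
             (and (lshiftrt X 8) 0xff).  */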
 
9765
          if (CONST_INT_P (XEXP (varop, 1))
9766
              /* We can't do this if we have (ashiftrt (xor))  and the
9767
                 constant has its sign bit set in shift_mode.  */
9768
              && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9769
                   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9770
                                              shift_mode))
9771
              && (new_rtx = simplify_const_binary_operation (code, result_mode,
9772
                                                         XEXP (varop, 1),
9773
                                                         GEN_INT (count))) != 0
9774
              && CONST_INT_P (new_rtx)
9775
              && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9776
                                  INTVAL (new_rtx), result_mode, &complement_p))
9777
            {
9778
              varop = XEXP (varop, 0);
9779
              continue;
9780
            }
9781
 
9782
          /* If we can't do that, try to simplify the shift in each arm of the
9783
             logical expression, make a new logical expression, and apply
9784
             the inverse distributive law.  This also can't be done
9785
             for some (ashiftrt (xor)).  */
9786
          if (CONST_INT_P (XEXP (varop, 1))
9787
             && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9788
                  && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9789
                                             shift_mode)))
9790
            {
9791
              rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9792
                                              XEXP (varop, 0), count);
9793
              rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9794
                                              XEXP (varop, 1), count);
9795
 
9796
              varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
9797
                                           lhs, rhs);
9798
              varop = apply_distributive_law (varop);
9799
 
9800
              count = 0;
9801
              continue;
9802
            }
9803
          break;
9804
 
9805
        case EQ:
9806
          /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9807
             says that the sign bit can be tested, FOO has mode MODE, C is
9808
             GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9809
             that may be nonzero.  */
9810
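          /* With STORE_FLAG_VALUE == -1, (eq FOO 0) is all ones when FOO
             is 0 and all zeros when FOO is 1, so its high bit shifted
             down to bit 0 is exactly (xor FOO 1).  */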
          if (code == LSHIFTRT
9811
              && XEXP (varop, 1) == const0_rtx
9812
              && GET_MODE (XEXP (varop, 0)) == result_mode
9813
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9814
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9815
              && STORE_FLAG_VALUE == -1
9816
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9817
              && merge_outer_ops (&outer_op, &outer_const, XOR,
9818
                                  (HOST_WIDE_INT) 1, result_mode,
9819
                                  &complement_p))
9820
            {
9821
              varop = XEXP (varop, 0);
9822
              count = 0;
9823
              continue;
9824
            }
9825
          break;
9826
 
9827
        case NEG:
9828
          /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9829
             than the number of bits in the mode is equivalent to A.  */
9830
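          /* E.g. in SImode with A in {0, 1}: (neg A) is 0 or -1, so
             (lshiftrt (neg A) 31) is 0 or 1, i.e. A itself.  */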
          if (code == LSHIFTRT
9831
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9832
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9833
            {
9834
              varop = XEXP (varop, 0);
9835
              count = 0;
9836
              continue;
9837
            }
9838
 
9839
          /* NEG commutes with ASHIFT since it is multiplication.  Move the
9840
             NEG outside to allow shifts to combine.  */
9841
          if (code == ASHIFT
9842
              && merge_outer_ops (&outer_op, &outer_const, NEG,
9843
                                  (HOST_WIDE_INT) 0, result_mode,
9844
                                  &complement_p))
9845
            {
9846
              varop = XEXP (varop, 0);
9847
              continue;
9848
            }
9849
          break;
9850
 
9851
        case PLUS:
9852
          /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9853
             is one less than the number of bits in the mode is
9854
             equivalent to (xor A 1).  */
9855
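          /* E.g. in SImode with A in {0, 1}: (plus A -1) is -1 or 0, so
             (lshiftrt (plus A -1) 31) is 1 or 0, i.e. (xor A 1).  */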
          if (code == LSHIFTRT
9856
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
9857
              && XEXP (varop, 1) == constm1_rtx
9858
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9859
              && merge_outer_ops (&outer_op, &outer_const, XOR,
9860
                                  (HOST_WIDE_INT) 1, result_mode,
9861
                                  &complement_p))
9862
            {
9863
              count = 0;
9864
              varop = XEXP (varop, 0);
9865
              continue;
9866
            }
9867
 
9868
          /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9869
             that might be nonzero in BAR are those being shifted out and those
9870
             bits are known zero in FOO, we can replace the PLUS with FOO.
9871
             Similarly in the other operand order.  This code occurs when
9872
             we are computing the size of a variable-size array.  */
9873
 
9874
          if ((code == ASHIFTRT || code == LSHIFTRT)
9875
              && count < HOST_BITS_PER_WIDE_INT
9876
              && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9877
              && (nonzero_bits (XEXP (varop, 1), result_mode)
9878
                  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9879
            {
9880
              varop = XEXP (varop, 0);
9881
              continue;
9882
            }
9883
          else if ((code == ASHIFTRT || code == LSHIFTRT)
9884
                   && count < HOST_BITS_PER_WIDE_INT
9885
                   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9886
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9887
                            >> count)
9888
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9889
                            & nonzero_bits (XEXP (varop, 1),
9890
                                                 result_mode)))
9891
            {
9892
              varop = XEXP (varop, 1);
9893
              continue;
9894
            }
9895
 
9896
          /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
9897
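          /* E.g. (ashift (plus X 3) 2) is (plus (ashift X 2) 12), where
             12 is C' = (ashift 3 2).  */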
          if (code == ASHIFT
9898
              && CONST_INT_P (XEXP (varop, 1))
9899
              && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
9900
                                                         XEXP (varop, 1),
9901
                                                         GEN_INT (count))) != 0
9902
              && CONST_INT_P (new_rtx)
9903
              && merge_outer_ops (&outer_op, &outer_const, PLUS,
9904
                                  INTVAL (new_rtx), result_mode, &complement_p))
9905
            {
9906
              varop = XEXP (varop, 0);
9907
              continue;
9908
            }
9909
 
9910
          /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9911
             signbit', and attempt to change the PLUS to an XOR and move it to
9912
             the outer operation, as is done above in the AND/IOR/XOR
9913
             branch for logical shifts.  See the logical handling above
9914
             for the reasoning.  */
9915
          if (code == LSHIFTRT
9916
              && CONST_INT_P (XEXP (varop, 1))
9917
              && mode_signbit_p (result_mode, XEXP (varop, 1))
9918
              && (new_rtx = simplify_const_binary_operation (code, result_mode,
9919
                                                         XEXP (varop, 1),
9920
                                                         GEN_INT (count))) != 0
9921
              && CONST_INT_P (new_rtx)
9922
              && merge_outer_ops (&outer_op, &outer_const, XOR,
9923
                                  INTVAL (new_rtx), result_mode, &complement_p))
9924
            {
9925
              varop = XEXP (varop, 0);
9926
              continue;
9927
            }
9928
 
9929
          break;
9930
 
9931
        case MINUS:
9932
          /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9933
             with C the size of VAROP - 1 and the shift is logical if
9934
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9935
             we have a (gt X 0) operation.  If the shift is arithmetic with
9936
             STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9937
             we have a (neg (gt X 0)) operation.  */
9938
 
9939
          if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9940
              && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9941
              && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9942
              && (code == LSHIFTRT || code == ASHIFTRT)
9943
              && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
9944
              && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9945
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9946
            {
9947
              count = 0;
9948
              varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9949
                                  const0_rtx);
9950
 
9951
              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9952
                varop = gen_rtx_NEG (GET_MODE (varop), varop);
9953
 
9954
              continue;
9955
            }
9956
          break;
9957
 
9958
        case TRUNCATE:
9959
          /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9960
             if the truncate does not affect the value.  */
9961
          if (code == LSHIFTRT
9962
              && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9963
              && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
9964
              && (INTVAL (XEXP (XEXP (varop, 0), 1))
9965
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9966
                      - GET_MODE_BITSIZE (GET_MODE (varop)))))
9967
            {
9968
              rtx varop_inner = XEXP (varop, 0);
9969
 
9970
              varop_inner
9971
                = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9972
                                    XEXP (varop_inner, 0),
9973
                                    GEN_INT
9974
                                    (count + INTVAL (XEXP (varop_inner, 1))));
9975
              varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9976
              count = 0;
9977
              continue;
9978
            }
9979
          break;
9980
 
9981
        default:
9982
          break;
9983
        }
9984
 
9985
      break;
9986
    }
9987
 
9988
  shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode,
9989
                                     outer_op, outer_const);
9990
 
9991
  /* We have now finished analyzing the shift.  The result should be
9992
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
9993
     OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
9994
     to the result of the shift.  OUTER_CONST is the relevant constant,
9995
     but we must turn off all bits turned off in the shift.  */
9996
 
9997
  if (outer_op == UNKNOWN
9998
      && orig_code == code && orig_count == count
9999
      && varop == orig_varop
10000
      && shift_mode == GET_MODE (varop))
10001
    return NULL_RTX;
10002
 
10003
  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
10004
  varop = gen_lowpart (shift_mode, varop);
10005
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
10006
    return NULL_RTX;
10007
 
10008
  /* If we have an outer operation and we just made a shift, it is
10009
     possible that we could have simplified the shift were it not
10010
     for the outer operation.  So try to do the simplification
10011
     recursively.  */
10012
 
10013
  if (outer_op != UNKNOWN)
10014
    x = simplify_shift_const_1 (code, shift_mode, varop, count);
10015
  else
10016
    x = NULL_RTX;
10017
 
10018
  if (x == NULL_RTX)
10019
    x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));
10020
 
10021
  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
10022
     turn off all the bits that the shift would have turned off.  */
10023
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
10024
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
10025
                                GET_MODE_MASK (result_mode) >> orig_count);
10026
 
10027
  /* Do the remainder of the processing in RESULT_MODE.  */
10028
  x = gen_lowpart_or_truncate (result_mode, x);
10029
 
10030
  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
10031
     operation.  */
10032
  if (complement_p)
10033
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);
10034
 
10035
  if (outer_op != UNKNOWN)
10036
    {
10037
      if (GET_RTX_CLASS (outer_op) != RTX_UNARY
10038
          && GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
10039
        outer_const = trunc_int_for_mode (outer_const, result_mode);
10040
 
10041
      if (outer_op == AND)
10042
        x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
10043
      else if (outer_op == SET)
10044
        {
10045
          /* This means that we have determined that the result is
10046
             equivalent to a constant.  This should be rare.  */
10047
          if (!side_effects_p (x))
10048
            x = GEN_INT (outer_const);
10049
        }
10050
      else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
10051
        x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
10052
      else
10053
        x = simplify_gen_binary (outer_op, result_mode, x,
10054
                                 GEN_INT (outer_const));
10055
    }
10056
 
10057
  return x;
10058
}
10059
 
10060
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
10061
   The result of the shift is RESULT_MODE.  If we cannot simplify it,
10062
   return X or, if it is NULL, synthesize the expression with
10063
   simplify_gen_binary.  Otherwise, return a simplified value.
10064
 
10065
   The shift is normally computed in the widest mode we find in VAROP, as
10066
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
10067
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */
10068
 
10069
static rtx
10070
simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
10071
                      rtx varop, int count)
10072
{
10073
  rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
10074
  if (tem)
10075
    return tem;
10076
 
10077
  if (!x)
10078
    x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
10079
  if (GET_MODE (x) != result_mode)
10080
    x = gen_lowpart (result_mode, x);
10081
  return x;
10082
}
10083
 
10084
 
10085
/* Like recog, but we receive the address of a pointer to a new pattern.
10086
   We try to match the rtx that the pointer points to.
10087
   If that fails, we may try to modify or replace the pattern,
10088
   storing the replacement into the same pointer object.
10089
 
10090
   Modifications include deletion or addition of CLOBBERs.
10091
 
10092
   PNOTES is a pointer to a location where any REG_UNUSED notes added for
10093
   the CLOBBERs are placed.
10094
 
10095
   The value is the final insn code from the pattern ultimately matched,
10096
   or -1.  */
10097
 
10098
static int
10099
recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
10100
{
10101
  rtx pat = *pnewpat;
10102
  int insn_code_number;
10103
  int num_clobbers_to_add = 0;
10104
  int i;
10105
  rtx notes = 0;
10106
  rtx old_notes, old_pat;
10107
 
10108
  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
10109
     we use to indicate that something didn't match.  If we find such a
10110
     thing, force rejection.  */
10111
  if (GET_CODE (pat) == PARALLEL)
10112
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
10113
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
10114
          && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
10115
        return -1;
10116
 
10117
  old_pat = PATTERN (insn);
10118
  old_notes = REG_NOTES (insn);
10119
  PATTERN (insn) = pat;
10120
  REG_NOTES (insn) = 0;
10121
 
10122
  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10123
  if (dump_file && (dump_flags & TDF_DETAILS))
10124
    {
10125
      if (insn_code_number < 0)
10126
        fputs ("Failed to match this instruction:\n", dump_file);
10127
      else
10128
        fputs ("Successfully matched this instruction:\n", dump_file);
10129
      print_rtl_single (dump_file, pat);
10130
    }
10131
 
10132
  /* If the pattern wasn't recognized, we may previously have had an insn
10133
     that clobbered some register as a side effect, but the combined
10134
     insn doesn't need to do that.  So try once more without the clobbers
10135
     unless this represents an ASM insn.  */
10136
 
10137
  if (insn_code_number < 0 && ! check_asm_operands (pat)
10138
      && GET_CODE (pat) == PARALLEL)
10139
    {
10140
      int pos;
10141
 
10142
      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
10143
        if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
10144
          {
10145
            if (i != pos)
10146
              SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
10147
            pos++;
10148
          }
10149
 
10150
      SUBST_INT (XVECLEN (pat, 0), pos);
10151
 
10152
      if (pos == 1)
10153
        pat = XVECEXP (pat, 0, 0);
10154
 
10155
      PATTERN (insn) = pat;
10156
      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10157
      if (dump_file && (dump_flags & TDF_DETAILS))
10158
        {
10159
          if (insn_code_number < 0)
10160
            fputs ("Failed to match this instruction:\n", dump_file);
10161
          else
10162
            fputs ("Successfully matched this instruction:\n", dump_file);
10163
          print_rtl_single (dump_file, pat);
10164
        }
10165
    }
10166
  PATTERN (insn) = old_pat;
10167
  REG_NOTES (insn) = old_notes;
10168
 
10169
  /* Recognize all noop sets; these will be killed by a followup pass.  */
10170
  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
10171
    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
10172
 
10173
  /* If we had any clobbers to add, make a new pattern that contains
10174
     them.  Then check to make sure that all of them are dead.  */
10175
  if (num_clobbers_to_add)
10176
    {
10177
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
10178
                                     rtvec_alloc (GET_CODE (pat) == PARALLEL
10179
                                                  ? (XVECLEN (pat, 0)
10180
                                                     + num_clobbers_to_add)
10181
                                                  : num_clobbers_to_add + 1));
10182
 
10183
      if (GET_CODE (pat) == PARALLEL)
10184
        for (i = 0; i < XVECLEN (pat, 0); i++)
10185
          XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
10186
      else
10187
        XVECEXP (newpat, 0, 0) = pat;
10188
 
10189
      add_clobbers (newpat, insn_code_number);
10190
 
10191
      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
10192
           i < XVECLEN (newpat, 0); i++)
10193
        {
10194
          if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
10195
              && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
10196
            return -1;
10197
          if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
10198
            {
10199
              gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
10200
              notes = alloc_reg_note (REG_UNUSED,
10201
                                      XEXP (XVECEXP (newpat, 0, i), 0), notes);
10202
            }
10203
        }
10204
      pat = newpat;
10205
    }
10206
 
10207
  *pnewpat = pat;
10208
  *pnotes = notes;
10209
 
10210
  return insn_code_number;
10211
}
10212
 
10213
/* Like gen_lowpart_general but for use by combine.  In combine it
10214
   is not possible to create any new pseudoregs.  However, it is
10215
   safe to create invalid memory addresses, because combine will
10216
   try to recognize them and all they will do is make the combine
10217
   attempt fail.
10218
 
10219
   If for some reason this cannot do its job, an rtx
10220
   (clobber (const_int 0)) is returned.
10221
   An insn containing that will not be recognized.  */
10222
 
10223
static rtx
10224
gen_lowpart_for_combine (enum machine_mode omode, rtx x)
10225
{
10226
  enum machine_mode imode = GET_MODE (x);
10227
  unsigned int osize = GET_MODE_SIZE (omode);
10228
  unsigned int isize = GET_MODE_SIZE (imode);
10229
  rtx result;
10230
 
10231
  if (omode == imode)
10232
    return x;
10233
 
10234
  /* Return identity if this is a CONST or symbolic reference.  */
10235
  if (omode == Pmode
10236
      && (GET_CODE (x) == CONST
10237
          || GET_CODE (x) == SYMBOL_REF
10238
          || GET_CODE (x) == LABEL_REF))
10239
    return x;
10240
 
10241
  /* We can only support MODE being wider than a word if X is a
10242
     constant integer or has a mode the same size.  */
10243
  if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
10244
      && ! ((imode == VOIDmode
10245
             && (CONST_INT_P (x)
10246
                 || GET_CODE (x) == CONST_DOUBLE))
10247
            || isize == osize))
10248
    goto fail;
10249
 
10250
  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
10251
     won't know what to do.  So we will strip off the SUBREG here and
10252
     process normally.  */
10253
  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
10254
    {
10255
      x = SUBREG_REG (x);
10256
 
10257
      /* In case we fall through to the address adjustments further
10258
         below, update the known mode and size of x (imode and isize),
10259
         since we just changed x.  */
10260
      imode = GET_MODE (x);
10261
 
10262
      if (imode == omode)
10263
        return x;
10264
 
10265
      isize = GET_MODE_SIZE (imode);
10266
    }
10267
 
10268
  result = gen_lowpart_common (omode, x);
10269
 
10270
  if (result)
10271
    return result;
10272
 
10273
  if (MEM_P (x))
10274
    {
10275
      int offset = 0;
10276
 
10277
      /* Refuse to work on a volatile memory ref or one with a mode-dependent
10278
         address.  */
10279
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
10280
        goto fail;
10281
 
10282
      /* If we want to refer to something bigger than the original memref,
10283
         generate a paradoxical subreg instead.  That will force a reload
10284
         of the original memref X.  */
10285
      if (isize < osize)
10286
        return gen_rtx_SUBREG (omode, x, 0);
10287
 
10288
      if (WORDS_BIG_ENDIAN)
10289
        offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
10290
 
10291
      /* Adjust the address so that the address-after-the-data is
10292
         unchanged.  */
10293
      if (BYTES_BIG_ENDIAN)
10294
        offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
10295
 
10296
      return adjust_address_nv (x, omode, offset);
10297
    }
10298
 
10299
  /* If X is a comparison operator, rewrite it in a new mode.  This
10300
     probably won't match, but may allow further simplifications.  */
10301
  else if (COMPARISON_P (x))
10302
    return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
10303
 
10304
  /* If we couldn't simplify X any other way, just enclose it in a
10305
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
10306
     include an explicit SUBREG or we may simplify it further in combine.  */
10307
  else
10308
    {
10309
      int offset = 0;
10310
      rtx res;
10311
 
10312
      offset = subreg_lowpart_offset (omode, imode);
10313
      if (imode == VOIDmode)
10314
        {
10315
          imode = int_mode_for_mode (omode);
10316
          x = gen_lowpart_common (imode, x);
10317
          if (x == NULL)
10318
            goto fail;
10319
        }
10320
      res = simplify_gen_subreg (omode, x, imode, offset);
10321
      if (res)
10322
        return res;
10323
    }
10324
 
10325
 fail:
10326
  return gen_rtx_CLOBBER (omode, const0_rtx);
10327
}
10328
 
10329
/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10330
   comparison code that will be tested.
10331
 
10332
   The result is a possibly different comparison code to use.  *POP0 and
10333
   *POP1 may be updated.
10334
 
10335
   It is possible that we might detect that a comparison is either always
10336
   true or always false.  However, we do not perform general constant
10337
   folding in combine, so this knowledge isn't useful.  Such tautologies
10338
   should have been detected earlier.  Hence we ignore all such cases.  */
10339
 
10340
static enum rtx_code
10341
simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10342
{
10343
  rtx op0 = *pop0;
10344
  rtx op1 = *pop1;
10345
  rtx tem, tem1;
10346
  int i;
10347
  enum machine_mode mode, tmode;
10348
 
10349
  /* Try a few ways of applying the same transformation to both operands.  */
10350
  while (1)
10351
    {
10352
#ifndef WORD_REGISTER_OPERATIONS
10353
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
10354
         so check specially.  */
10355
      if (code != GTU && code != GEU && code != LTU && code != LEU
10356
          && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10357
          && GET_CODE (XEXP (op0, 0)) == ASHIFT
10358
          && GET_CODE (XEXP (op1, 0)) == ASHIFT
10359
          && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10360
          && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10361
          && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10362
              == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10363
          && CONST_INT_P (XEXP (op0, 1))
10364
          && XEXP (op0, 1) == XEXP (op1, 1)
10365
          && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10366
          && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10367
          && (INTVAL (XEXP (op0, 1))
10368
              == (GET_MODE_BITSIZE (GET_MODE (op0))
10369
                  - (GET_MODE_BITSIZE
10370
                     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10371
        {
10372
          op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10373
          op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10374
        }
10375
#endif
10376
 
10377
      /* If both operands are the same constant shift, see if we can ignore the
10378
         shift.  We can if the shift is a rotate or if the bits shifted out of
10379
         this shift are known to be zero for both inputs and if the type of
10380
         comparison is compatible with the shift.  */
10381
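      /* For instance, (lshiftrt A 2) == (lshiftrt B 2) can be tested as
         A == B when the low two bits of both A and B are known to be
         zero.  */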
      if (GET_CODE (op0) == GET_CODE (op1)
10382
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10383
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10384
              || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10385
                  && (code != GT && code != LT && code != GE && code != LE))
10386
              || (GET_CODE (op0) == ASHIFTRT
10387
                  && (code != GTU && code != LTU
10388
                      && code != GEU && code != LEU)))
10389
          && CONST_INT_P (XEXP (op0, 1))
10390
          && INTVAL (XEXP (op0, 1)) >= 0
10391
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10392
          && XEXP (op0, 1) == XEXP (op1, 1))
10393
        {
10394
          enum machine_mode mode = GET_MODE (op0);
10395
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10396
          int shift_count = INTVAL (XEXP (op0, 1));
10397
 
10398
          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10399
            mask &= (mask >> shift_count) << shift_count;
10400
          else if (GET_CODE (op0) == ASHIFT)
10401
            mask = (mask & (mask << shift_count)) >> shift_count;
10402
 
10403
          if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10404
              && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10405
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10406
          else
10407
            break;
10408
        }
10409
 
10410
      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10411
         SUBREGs are of the same mode, and, in both cases, the AND would
10412
         be redundant if the comparison was done in the narrower mode,
10413
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
10414
         and the operand's possibly nonzero bits are 0xffffff01; in that case
10415
         if we only care about QImode, we don't need the AND).  This case
10416
         occurs if the output mode of an scc insn is not SImode and
10417
         STORE_FLAG_VALUE == 1 (e.g., the 386).
10418
 
10419
         Similarly, check for a case where the AND's are ZERO_EXTEND
10420
         operations from some narrower mode even though a SUBREG is not
10421
         present.  */
10422
 
10423
      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10424
               && CONST_INT_P (XEXP (op0, 1))
10425
               && CONST_INT_P (XEXP (op1, 1)))
10426
        {
10427
          rtx inner_op0 = XEXP (op0, 0);
10428
          rtx inner_op1 = XEXP (op1, 0);
10429
          HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10430
          HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10431
          int changed = 0;
10432
 
10433
          if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10434
              && (GET_MODE_SIZE (GET_MODE (inner_op0))
10435
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10436
              && (GET_MODE (SUBREG_REG (inner_op0))
10437
                  == GET_MODE (SUBREG_REG (inner_op1)))
10438
              && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10439
                  <= HOST_BITS_PER_WIDE_INT)
10440
              && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10441
                                             GET_MODE (SUBREG_REG (inner_op0)))))
10442
              && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10443
                                             GET_MODE (SUBREG_REG (inner_op1))))))
10444
            {
10445
              op0 = SUBREG_REG (inner_op0);
10446
              op1 = SUBREG_REG (inner_op1);
10447
 
10448
              /* The resulting comparison is always unsigned since we masked
10449
                 off the original sign bit.  */
10450
              code = unsigned_condition (code);
10451
 
10452
              changed = 1;
10453
            }
10454
 
10455
          else if (c0 == c1)
10456
            for (tmode = GET_CLASS_NARROWEST_MODE
10457
                 (GET_MODE_CLASS (GET_MODE (op0)));
10458
                 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
10459
              if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
10460
                {
10461
                  op0 = gen_lowpart (tmode, inner_op0);
10462
                  op1 = gen_lowpart (tmode, inner_op1);
10463
                  code = unsigned_condition (code);
10464
                  changed = 1;
10465
                  break;
10466
                }
10467
 
10468
          if (! changed)
10469
            break;
10470
        }
10471
 
10472
      /* If both operands are NOT, we can strip off the outer operation
10473
         and adjust the comparison code for swapped operands; similarly for
10474
         NEG, except that this must be an equality comparison.  */
10475
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10476
               || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10477
                   && (code == EQ || code == NE)))
10478
        op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
10479
 
10480
      else
10481
        break;
10482
    }
10483
 
10484
  /* If the first operand is a constant, swap the operands and adjust the
10485
     comparison code appropriately, but don't do this if the second operand
10486
     is already a constant integer.  */
10487
  if (swap_commutative_operands_p (op0, op1))
10488
    {
10489
      tem = op0, op0 = op1, op1 = tem;
10490
      code = swap_condition (code);
10491
    }
10492
 
10493
  /* We now enter a loop during which we will try to simplify the comparison.
10494
     For the most part, we only are concerned with comparisons with zero,
10495
     but some things may really be comparisons with zero but not start
10496
     out looking that way.  */
10497
 
10498
  while (CONST_INT_P (op1))
10499
    {
10500
      enum machine_mode mode = GET_MODE (op0);
10501
      unsigned int mode_width = GET_MODE_BITSIZE (mode);
10502
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10503
      int equality_comparison_p;
10504
      int sign_bit_comparison_p;
10505
      int unsigned_comparison_p;
10506
      HOST_WIDE_INT const_op;
10507
 
10508
      /* We only want to handle integral modes.  This catches VOIDmode,
10509
         CCmode, and the floating-point modes.  An exception is that we
10510
         can handle VOIDmode if OP0 is a COMPARE or a comparison
10511
         operation.  */
10512
 
10513
      if (GET_MODE_CLASS (mode) != MODE_INT
10514
          && ! (mode == VOIDmode
10515
                && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
10516
        break;
10517
 
10518
      /* Get the constant we are comparing against and turn off all bits
10519
         not on in our mode.  */
10520
      const_op = INTVAL (op1);
10521
      if (mode != VOIDmode)
10522
        const_op = trunc_int_for_mode (const_op, mode);
10523
      op1 = GEN_INT (const_op);
10524
 
10525
      /* If we are comparing against a constant power of two and the value
10526
         being compared can only have that single bit nonzero (e.g., it was
10527
         `and'ed with that bit), we can replace this with a comparison
10528
         with zero.  */
10529
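      /* E.g. if OP0 is (and X 8), its only possibly nonzero bit is
         bit 3, so (eq (and X 8) 8) becomes (ne (and X 8) 0).  */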
      if (const_op
10530
          && (code == EQ || code == NE || code == GE || code == GEU
10531
              || code == LT || code == LTU)
10532
          && mode_width <= HOST_BITS_PER_WIDE_INT
10533
          && exact_log2 (const_op) >= 0
10534
          && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
10535
        {
10536
          code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10537
          op1 = const0_rtx, const_op = 0;
10538
        }
10539
 
10540
      /* Similarly, if we are comparing a value known to be either -1 or
10541
         0 against -1, we can replace this with a comparison with zero.  */
10542
 
10543
      if (const_op == -1
10544
          && (code == EQ || code == NE || code == GT || code == LE
10545
              || code == GEU || code == LTU)
10546
          && num_sign_bit_copies (op0, mode) == mode_width)
10547
        {
10548
          code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10549
          op1 = const0_rtx, const_op = 0;
10550
        }
10551
 
10552
      /* Do some canonicalizations based on the comparison code.  We prefer
10553
         comparisons against zero and then prefer equality comparisons.
10554
         If we can reduce the size of a constant, we will do that too.  */
10555
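      /* For example, (lt X 1) becomes (le X 0), and the unsigned
         (ltu X 0x80000000) in SImode becomes the signed (ge X 0).  */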
 
10556
      switch (code)
10557
        {
10558
        case LT:
10559
          /* < C is equivalent to <= (C - 1).  */
10560
          if (const_op > 0)
10561
            {
10562
              const_op -= 1;
10563
              op1 = GEN_INT (const_op);
10564
              code = LE;
10565
              /* ... fall through to LE case below.  */
10566
            }
10567
          else
10568
            break;
10569
 
10570
        case LE:
10571
          /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
10572
          if (const_op < 0)
10573
            {
10574
              const_op += 1;
10575
              op1 = GEN_INT (const_op);
10576
              code = LT;
10577
            }
10578
 
10579
          /* If we are doing a <= 0 comparison on a value known to have
10580
             a zero sign bit, we can replace this with == 0.  */
10581
          else if (const_op == 0
10582
                   && mode_width <= HOST_BITS_PER_WIDE_INT
10583
                   && (nonzero_bits (op0, mode)
10584
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10585
            code = EQ;
10586
          break;
10587
 
10588
        case GE:
10589
          /* >= C is equivalent to > (C - 1).  */
10590
          if (const_op > 0)
10591
            {
10592
              const_op -= 1;
10593
              op1 = GEN_INT (const_op);
10594
              code = GT;
10595
              /* ... fall through to GT below.  */
10596
            }
10597
          else
10598
            break;
10599
 
10600
        case GT:
10601
          /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
10602
          if (const_op < 0)
10603
            {
10604
              const_op += 1;
10605
              op1 = GEN_INT (const_op);
10606
              code = GE;
10607
            }
10608
 
10609
          /* If we are doing a > 0 comparison on a value known to have
10610
             a zero sign bit, we can replace this with != 0.  */
10611
          else if (const_op == 0
10612
                   && mode_width <= HOST_BITS_PER_WIDE_INT
10613
                   && (nonzero_bits (op0, mode)
10614
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10615
            code = NE;
10616
          break;
10617
 
10618
        case LTU:
10619
          /* < C is equivalent to <= (C - 1).  */
10620
          if (const_op > 0)
10621
            {
10622
              const_op -= 1;
10623
              op1 = GEN_INT (const_op);
10624
              code = LEU;
10625
              /* ... fall through ...  */
10626
            }
10627
 
10628
          /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
10629
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10630
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10631
            {
10632
              const_op = 0, op1 = const0_rtx;
10633
              code = GE;
10634
              break;
10635
            }
10636
          else
10637
            break;
10638
 
10639
        case LEU:
10640
          /* unsigned <= 0 is equivalent to == 0 */
10641
          if (const_op == 0)
10642
            code = EQ;
10643
 
10644
          /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
10645
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10646
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10647
            {
10648
              const_op = 0, op1 = const0_rtx;
10649
              code = GE;
10650
            }
10651
          break;
10652
 
10653
        case GEU:
10654
          /* >= C is equivalent to > (C - 1).  */
10655
          if (const_op > 1)
10656
            {
10657
              const_op -= 1;
10658
              op1 = GEN_INT (const_op);
10659
              code = GTU;
10660
              /* ... fall through ...  */
10661
            }
10662
 
10663
          /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
10664
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10665
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10666
            {
10667
              const_op = 0, op1 = const0_rtx;
10668
              code = LT;
10669
              break;
10670
            }
10671
          else
10672
            break;
10673
 
10674
        case GTU:
10675
          /* unsigned > 0 is equivalent to != 0 */
10676
          if (const_op == 0)
10677
            code = NE;
10678
 
10679
          /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
10680
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10681
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10682
            {
10683
              const_op = 0, op1 = const0_rtx;
10684
              code = LT;
10685
            }
10686
          break;
10687
 
10688
        default:
10689
          break;
10690
        }
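      /* To illustrate the canonicalizations above: (lt X 5) becomes
         (le X 4), (leu X 0) becomes (eq X 0), and in a 32-bit mode
         (ltu X 0x80000000) becomes (ge X 0), since an unsigned value is
         below 0x80000000 exactly when its sign bit is clear.  */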
10691
 
10692
      /* Compute some predicates to simplify code below.  */
10693
 
10694
      equality_comparison_p = (code == EQ || code == NE);
10695
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10696
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10697
                               || code == GEU);
10698
 
10699
      /* If this is a sign bit comparison and we can do arithmetic in
10700
         MODE, say that we will only be needing the sign bit of OP0.  */
10701
      if (sign_bit_comparison_p
10702
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10703
        op0 = force_to_mode (op0, mode,
10704
                             ((HOST_WIDE_INT) 1
10705
                              << (GET_MODE_BITSIZE (mode) - 1)),
10706
                             0);
10707
 
10708
      /* Now try cases based on the opcode of OP0.  If none of the cases
10709
         does a "continue", we exit this loop immediately after the
10710
         switch.  */
10711
 
10712
      switch (GET_CODE (op0))
10713
        {
10714
        case ZERO_EXTRACT:
10715
          /* If we are extracting a single bit from a variable position in
10716
             a constant that has only a single bit set and are comparing it
10717
             with zero, we can convert this into an equality comparison
10718
             between the position and the location of the single bit.  */
10719
          /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10720
             have already reduced the shift count modulo the word size.  */
10721
          if (!SHIFT_COUNT_TRUNCATED
10722
              && CONST_INT_P (XEXP (op0, 0))
10723
              && XEXP (op0, 1) == const1_rtx
10724
              && equality_comparison_p && const_op == 0
10725
              && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10726
            {
10727
              if (BITS_BIG_ENDIAN)
10728
                {
10729
                  enum machine_mode new_mode
10730
                    = mode_for_extraction (EP_extzv, 1);
10731
                  if (new_mode == MAX_MACHINE_MODE)
10732
                    i = BITS_PER_WORD - 1 - i;
10733
                  else
10734
                    {
10735
                      mode = new_mode;
10736
                      i = (GET_MODE_BITSIZE (mode) - 1 - i);
10737
                    }
10738
                }
10739
 
10740
              op0 = XEXP (op0, 2);
10741
              op1 = GEN_INT (i);
10742
              const_op = i;
10743
 
10744
              /* Result is nonzero iff shift count is equal to I.  */
10745
              code = reverse_condition (code);
10746
              continue;
10747
            }
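          /* For example, (eq (zero_extract (const_int 8) (const_int 1) POS)
             (const_int 0)) asks whether bit POS of the constant 8 is clear;
             only bit 3 is set, so this becomes (ne POS (const_int 3)).  */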
10748
 
10749
          /* ... fall through ...  */
10750
 
10751
        case SIGN_EXTRACT:
10752
          tem = expand_compound_operation (op0);
10753
          if (tem != op0)
10754
            {
10755
              op0 = tem;
10756
              continue;
10757
            }
10758
          break;
10759
 
10760
        case NOT:
10761
          /* If testing for equality, we can take the NOT of the constant.  */
10762
          if (equality_comparison_p
10763
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10764
            {
10765
              op0 = XEXP (op0, 0);
10766
              op1 = tem;
10767
              continue;
10768
            }
10769
 
10770
          /* If just looking at the sign bit, reverse the sense of the
10771
             comparison.  */
10772
          if (sign_bit_comparison_p)
10773
            {
10774
              op0 = XEXP (op0, 0);
10775
              code = (code == GE ? LT : GE);
10776
              continue;
10777
            }
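          /* The sign bit of (not X) is the complement of the sign bit of
             X, so e.g. (lt (not X) 0) is equivalent to (ge X 0).  */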
10778
          break;
10779
 
10780
        case NEG:
10781
          /* If testing for equality, we can take the NEG of the constant.  */
10782
          if (equality_comparison_p
10783
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10784
            {
10785
              op0 = XEXP (op0, 0);
10786
              op1 = tem;
10787
              continue;
10788
            }
10789
 
10790
          /* The remaining cases only apply to comparisons with zero.  */
10791
          if (const_op != 0)
10792
            break;
10793
 
10794
          /* When X is ABS or is known positive,
10795
             (neg X) is < 0 if and only if X != 0.  */
10796
 
10797
          if (sign_bit_comparison_p
10798
              && (GET_CODE (XEXP (op0, 0)) == ABS
10799
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
10800
                      && (nonzero_bits (XEXP (op0, 0), mode)
10801
                          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10802
            {
10803
              op0 = XEXP (op0, 0);
10804
              code = (code == LT ? NE : EQ);
10805
              continue;
10806
            }
10807
 
10808
          /* If we have NEG of something whose two high-order bits are the
10809
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
10810
          if (num_sign_bit_copies (op0, mode) >= 2)
10811
            {
10812
              op0 = XEXP (op0, 0);
10813
              code = swap_condition (code);
10814
              continue;
10815
            }
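          /* The two-copies requirement rules out the one value for which
             the identity fails: if A were the most negative number,
             (neg A) would be A itself and its top two bits would differ.  */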
10816
          break;
10817
 
10818
        case ROTATE:
10819
          /* If we are testing equality and our count is a constant, we
10820
             can perform the inverse operation on our RHS.  */
10821
          if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
10822
              && (tem = simplify_binary_operation (ROTATERT, mode,
10823
                                                   op1, XEXP (op0, 1))) != 0)
10824
            {
10825
              op0 = XEXP (op0, 0);
10826
              op1 = tem;
10827
              continue;
10828
            }
10829
 
10830
          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10831
             a particular bit.  Convert it to an AND of a constant of that
10832
             bit.  This will be converted into a ZERO_EXTRACT.  */
10833
          if (const_op == 0 && sign_bit_comparison_p
10834
              && CONST_INT_P (XEXP (op0, 1))
10835
              && mode_width <= HOST_BITS_PER_WIDE_INT)
10836
            {
10837
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10838
                                            ((HOST_WIDE_INT) 1
10839
                                             << (mode_width - 1
10840
                                                 - INTVAL (XEXP (op0, 1)))));
10841
              code = (code == LT ? NE : EQ);
10842
              continue;
10843
            }
10844
 
10845
          /* Fall through.  */
10846
 
10847
        case ABS:
10848
          /* ABS is ignorable inside an equality comparison with zero.  */
10849
          if (const_op == 0 && equality_comparison_p)
10850
            {
10851
              op0 = XEXP (op0, 0);
10852
              continue;
10853
            }
10854
          break;
10855
 
10856
        case SIGN_EXTEND:
10857
          /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10858
             (compare FOO CONST) if CONST fits in FOO's mode and we
10859
             are either testing inequality or have an unsigned
10860
             comparison with ZERO_EXTEND or a signed comparison with
10861
             SIGN_EXTEND.  But don't do it if we don't have a compare
10862
             insn of the given mode, since we'd have to revert it
10863
             later on, and then we wouldn't know whether to sign- or
10864
             zero-extend.  */
10865
          mode = GET_MODE (XEXP (op0, 0));
10866
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10867
              && ! unsigned_comparison_p
10868
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10869
              && ((unsigned HOST_WIDE_INT) const_op
10870
                  < (((unsigned HOST_WIDE_INT) 1
10871
                      << (GET_MODE_BITSIZE (mode) - 1))))
10872
              && have_insn_for (COMPARE, mode))
10873
            {
10874
              op0 = XEXP (op0, 0);
10875
              continue;
10876
            }
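          /* For instance, (compare (sign_extend:SI X:QI) (const_int 5))
             can become (compare X:QI (const_int 5)) when the target has a
             QImode compare, since 5 is representable in QImode.  */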
10877
          break;
10878
 
10879
        case SUBREG:
10880
          /* Check for the case where we are comparing A - C1 with C2, that is
10881
 
10882
               (subreg:MODE (plus (A) (-C1))) op (C2)
10883
 
10884
             with C1 a constant, and try to lift the SUBREG, i.e. to do the
10885
             comparison in the wider mode.  One of the following two conditions
10886
             must be true in order for this to be valid:
10887
 
10888
               1. The mode extension results in the same bit pattern being added
10889
                  on both sides and the comparison is equality or unsigned.  As
10890
                  C2 has been truncated to fit in MODE, the pattern can only be
10891
                  all 0s or all 1s.
10892
 
10893
               2. The mode extension results in the sign bit being copied on
10894
                  each side.
10895
 
10896
             The difficulty here is that we have predicates for A but not for
10897
             (A - C1) so we need to check that C1 is within proper bounds so
10898
             as to perturb A as little as possible.  */
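          /* For instance, if A zero-extends from the narrow mode (all its
             upper bits are known zero) and C1 is a small positive constant,
             an equality test of the lowpart of (A - C1) against a
             nonnegative C2 gives the same answer as the full-width test,
             so the SUBREG can be lifted.  */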
10899
 
10900
          if (mode_width <= HOST_BITS_PER_WIDE_INT
10901
              && subreg_lowpart_p (op0)
10902
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
10903
              && GET_CODE (SUBREG_REG (op0)) == PLUS
10904
              && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
10905
            {
10906
              enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10907
              rtx a = XEXP (SUBREG_REG (op0), 0);
10908
              HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10909
 
10910
              if ((c1 > 0
10911
                   && (unsigned HOST_WIDE_INT) c1
10912
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10913
                   && (equality_comparison_p || unsigned_comparison_p)
10914
                   /* (A - C1) zero-extends if it is positive and sign-extends
10915
                      if it is negative, C2 both zero- and sign-extends.  */
10916
                   && ((0 == (nonzero_bits (a, inner_mode)
10917
                              & ~GET_MODE_MASK (mode))
10918
                        && const_op >= 0)
10919
                       /* (A - C1) sign-extends if it is positive and 1-extends
10920
                          if it is negative, C2 both sign- and 1-extends.  */
10921
                       || (num_sign_bit_copies (a, inner_mode)
10922
                           > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10923
                                             - mode_width)
10924
                           && const_op < 0)))
10925
                  || ((unsigned HOST_WIDE_INT) c1
10926
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10927
                      /* (A - C1) always sign-extends, like C2.  */
10928
                      && num_sign_bit_copies (a, inner_mode)
10929
                         > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10930
                                           - (mode_width - 1))))
10931
                {
10932
                  op0 = SUBREG_REG (op0);
10933
                  continue;
10934
                }
10935
            }
10936
 
10937
          /* If the inner mode is narrower and we are extracting the low part,
10938
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
10939
          if (subreg_lowpart_p (op0)
10940
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10941
            /* Fall through */ ;
10942
          else
10943
            break;
10944
 
10945
          /* ... fall through ...  */
10946
 
10947
        case ZERO_EXTEND:
10948
          mode = GET_MODE (XEXP (op0, 0));
10949
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10950
              && (unsigned_comparison_p || equality_comparison_p)
10951
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10952
              && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
10953
              && have_insn_for (COMPARE, mode))
10954
            {
10955
              op0 = XEXP (op0, 0);
10956
              continue;
10957
            }
10958
          break;
10959
 
10960
        case PLUS:
10961
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
10962
             this for equality comparisons due to pathological cases involving
10963
             overflows.  */
10964
          if (equality_comparison_p
10965
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
10966
                                                        op1, XEXP (op0, 1))))
10967
            {
10968
              op0 = XEXP (op0, 0);
10969
              op1 = tem;
10970
              continue;
10971
            }
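          /* For example, (eq (plus X (const_int 4)) (const_int 7)) becomes
             (eq X (const_int 3)).  An ordered comparison would not be safe
             because the addition may wrap around.  */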
10972
 
10973
          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
10974
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10975
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10976
            {
10977
              op0 = XEXP (XEXP (op0, 0), 0);
10978
              code = (code == LT ? EQ : NE);
10979
              continue;
10980
            }
10981
          break;
10982
 
10983
        case MINUS:
10984
          /* We used to optimize signed comparisons against zero, but that
10985
             was incorrect.  Unsigned comparisons against zero (GTU, LEU)
10986
             arrive here as equality comparisons, or (GEU, LTU) are
10987
             optimized away.  No need to special-case them.  */
10988
 
10989
          /* (eq (minus A B) C) -> (eq A (plus B C)) or
10990
             (eq B (minus A C)), whichever simplifies.  We can only do
10991
             this for equality comparisons due to pathological cases involving
10992
             overflows.  */
10993
          if (equality_comparison_p
10994
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
10995
                                                        XEXP (op0, 1), op1)))
10996
            {
10997
              op0 = XEXP (op0, 0);
10998
              op1 = tem;
10999
              continue;
11000
            }
11001
 
11002
          if (equality_comparison_p
11003
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
11004
                                                        XEXP (op0, 0), op1)))
11005
            {
11006
              op0 = XEXP (op0, 1);
11007
              op1 = tem;
11008
              continue;
11009
            }
11010
 
11011
          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
11012
             of bits in X minus 1, is one iff X > 0.  */
11013
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
11014
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11015
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
11016
                 == mode_width - 1
11017
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11018
            {
11019
              op0 = XEXP (op0, 1);
11020
              code = (code == GE ? LE : GT);
11021
              continue;
11022
            }
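          /* With C == mode_width - 1, (ashiftrt X C) is 0 for X >= 0 and
             -1 for X < 0, so the MINUS evaluates to -X (negative exactly
             when X > 0) or to ~X (never negative), respectively.  */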
11023
          break;
11024
 
11025
        case XOR:
11026
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
11027
             if C is zero or B is a constant.  */
11028
          if (equality_comparison_p
11029
              && 0 != (tem = simplify_binary_operation (XOR, mode,
11030
                                                        XEXP (op0, 1), op1)))
11031
            {
11032
              op0 = XEXP (op0, 0);
11033
              op1 = tem;
11034
              continue;
11035
            }
11036
          break;
11037
 
11038
        case EQ:  case NE:
11039
        case UNEQ:  case LTGT:
11040
        case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
11041
        case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
11042
        case UNORDERED: case ORDERED:
11043
          /* We can't do anything if OP0 is a condition code value, rather
11044
             than an actual data value.  */
11045
          if (const_op != 0
11046
              || CC0_P (XEXP (op0, 0))
11047
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
11048
            break;
11049
 
11050
          /* Get the two operands being compared.  */
11051
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
11052
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
11053
          else
11054
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
11055
 
11056
          /* Check for the cases where we simply want the result of the
11057
             earlier test or the opposite of that result.  */
11058
          if (code == NE || code == EQ
11059
              || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
11060
                  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11061
                  && (STORE_FLAG_VALUE
11062
                      & (((HOST_WIDE_INT) 1
11063
                          << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
11064
                  && (code == LT || code == GE)))
11065
            {
11066
              enum rtx_code new_code;
11067
              if (code == LT || code == NE)
11068
                new_code = GET_CODE (op0);
11069
              else
11070
                new_code = reversed_comparison_code (op0, NULL);
11071
 
11072
              if (new_code != UNKNOWN)
11073
                {
11074
                  code = new_code;
11075
                  op0 = tem;
11076
                  op1 = tem1;
11077
                  continue;
11078
                }
11079
            }
11080
          break;
11081
 
11082
        case IOR:
11083
          /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
11084
             iff X <= 0.  */
11085
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
11086
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
11087
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11088
            {
11089
              op0 = XEXP (op0, 1);
11090
              code = (code == GE ? GT : LE);
11091
              continue;
11092
            }
11093
          break;
11094
 
11095
        case AND:
11096
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
11097
             will be converted to a ZERO_EXTRACT later.  */
11098
          if (const_op == 0 && equality_comparison_p
11099
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
11100
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
11101
            {
11102
              op0 = simplify_and_const_int
11103
                (NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
11104
                                                   XEXP (op0, 1),
11105
                                                   XEXP (XEXP (op0, 0), 1)),
11106
                 (HOST_WIDE_INT) 1);
11107
              continue;
11108
            }
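          /* For example, (ne (and (ashift (const_int 1) X) Y) 0) tests bit
             X of Y; rewriting it as (ne (and (lshiftrt Y X) (const_int 1))
             0) moves the shift onto Y, a shape the ZERO_EXTRACT pattern
             recognizes.  */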
11109
 
11110
          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
11111
             zero and X is a comparison and C1 and C2 describe only bits set
11112
             in STORE_FLAG_VALUE, we can compare with X.  */
11113
          if (const_op == 0 && equality_comparison_p
11114
              && mode_width <= HOST_BITS_PER_WIDE_INT
11115
              && CONST_INT_P (XEXP (op0, 1))
11116
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
11117
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11118
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
11119
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
11120
            {
11121
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11122
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
11123
              if ((~STORE_FLAG_VALUE & mask) == 0
11124
                  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
11125
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
11126
                          && COMPARISON_P (tem))))
11127
                {
11128
                  op0 = XEXP (XEXP (op0, 0), 0);
11129
                  continue;
11130
                }
11131
            }
11132
 
11133
          /* If we are doing an equality comparison of an AND of a bit equal
11134
             to the sign bit, replace this with a LT or GE comparison of
11135
             the underlying value.  */
11136
          if (equality_comparison_p
11137
              && const_op == 0
11138
              && CONST_INT_P (XEXP (op0, 1))
11139
              && mode_width <= HOST_BITS_PER_WIDE_INT
11140
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11141
                  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
11142
            {
11143
              op0 = XEXP (op0, 0);
11144
              code = (code == EQ ? GE : LT);
11145
              continue;
11146
            }
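          /* For example, in a 32-bit mode (eq (and X 0x80000000) 0) tests
             the sign bit of X and becomes (ge X 0); the NE form becomes
             (lt X 0).  */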
11147
 
11148
          /* If this AND operation is really a ZERO_EXTEND from a narrower
11149
             mode, the constant fits within that mode, and this is either an
11150
             equality or unsigned comparison, try to do this comparison in
11151
             the narrower mode.
11152
 
11153
             Note that in:
11154
 
11155
             (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
11156
             -> (ne:DI (reg:SI 4) (const_int 0))
11157
 
11158
             the transformation is invalid unless TRULY_NOOP_TRUNCATION
11159
             allows it or the register is known to already hold a value of
11160
             the required mode.  */
11161
          if ((equality_comparison_p || unsigned_comparison_p)
11162
              && CONST_INT_P (XEXP (op0, 1))
11163
              && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
11164
                                   & GET_MODE_MASK (mode))
11165
                                  + 1)) >= 0
11166
              && const_op >> i == 0
11167
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
11168
              && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
11169
                                         GET_MODE_BITSIZE (GET_MODE (op0)))
11170
                  || (REG_P (XEXP (op0, 0))
11171
                      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
11172
            {
11173
              op0 = gen_lowpart (tmode, XEXP (op0, 0));
11174
              continue;
11175
            }
11176
 
11177
          /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
11178
             fits in both M1 and M2 and the SUBREG is either paradoxical
11179
             or represents the low part, permute the SUBREG and the AND
11180
             and try again.  */
11181
          if (GET_CODE (XEXP (op0, 0)) == SUBREG)
11182
            {
11183
              unsigned HOST_WIDE_INT c1;
11184
              tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
11185
              /* Require an integral mode, to avoid creating something like
11186
                 (AND:SF ...).  */
11187
              if (SCALAR_INT_MODE_P (tmode)
11188
                  /* It is unsafe to commute the AND into the SUBREG if the
11189
                     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
11190
                     not defined.  As originally written the upper bits
11191
                     have a defined value due to the AND operation.
11192
                     However, if we commute the AND inside the SUBREG then
11193
                     they no longer have defined values and the meaning of
11194
                     the code has been changed.  */
11195
                  && (0
11196
#ifdef WORD_REGISTER_OPERATIONS
11197
                      || (mode_width > GET_MODE_BITSIZE (tmode)
11198
                          && mode_width <= BITS_PER_WORD)
11199
#endif
11200
                      || (mode_width <= GET_MODE_BITSIZE (tmode)
11201
                          && subreg_lowpart_p (XEXP (op0, 0))))
11202
                  && CONST_INT_P (XEXP (op0, 1))
11203
                  && mode_width <= HOST_BITS_PER_WIDE_INT
11204
                  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
11205
                  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
11206
                  && (c1 & ~GET_MODE_MASK (tmode)) == 0
11207
                  && c1 != mask
11208
                  && c1 != GET_MODE_MASK (tmode))
11209
                {
11210
                  op0 = simplify_gen_binary (AND, tmode,
11211
                                             SUBREG_REG (XEXP (op0, 0)),
11212
                                             gen_int_mode (c1, tmode));
11213
                  op0 = gen_lowpart (mode, op0);
11214
                  continue;
11215
                }
11216
            }
11217
 
11218
          /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
11219
          if (const_op == 0 && equality_comparison_p
11220
              && XEXP (op0, 1) == const1_rtx
11221
              && GET_CODE (XEXP (op0, 0)) == NOT)
11222
            {
11223
              op0 = simplify_and_const_int
11224
                (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
11225
              code = (code == NE ? EQ : NE);
11226
              continue;
11227
            }
11228
 
11229
          /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
11230
             (eq (and (lshiftrt X) 1) 0).
11231
             Also handle the case where (not X) is expressed using xor.  */
11232
          if (const_op == 0 && equality_comparison_p
11233
              && XEXP (op0, 1) == const1_rtx
11234
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
11235
            {
11236
              rtx shift_op = XEXP (XEXP (op0, 0), 0);
11237
              rtx shift_count = XEXP (XEXP (op0, 0), 1);
11238
 
11239
              if (GET_CODE (shift_op) == NOT
11240
                  || (GET_CODE (shift_op) == XOR
11241
                      && CONST_INT_P (XEXP (shift_op, 1))
11242
                      && CONST_INT_P (shift_count)
11243
                      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11244
                      && (INTVAL (XEXP (shift_op, 1))
11245
                          == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11246
                {
11247
                  op0 = simplify_and_const_int
11248
                    (NULL_RTX, mode,
11249
                     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11250
                     (HOST_WIDE_INT) 1);
11251
                  code = (code == NE ? EQ : NE);
11252
                  continue;
11253
                }
11254
            }
11255
          break;
11256
 
11257
        case ASHIFT:
11258
          /* If we have (compare (ashift FOO N) (const_int C)) and
11259
             the high order N bits of FOO (N+1 if an inequality comparison)
11260
             are known to be zero, we can do this by comparing FOO with C
11261
             shifted right N bits so long as the low-order N bits of C are
11262
             zero.  */
11263
          if (CONST_INT_P (XEXP (op0, 1))
11264
              && INTVAL (XEXP (op0, 1)) >= 0
11265
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
11266
                  < HOST_BITS_PER_WIDE_INT)
11267
              && ((const_op
11268
                   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
11269
              && mode_width <= HOST_BITS_PER_WIDE_INT
11270
              && (nonzero_bits (XEXP (op0, 0), mode)
11271
                  & ~(mask >> (INTVAL (XEXP (op0, 1))
11272
                               + ! equality_comparison_p))) == 0)
11273
            {
11274
              /* We must perform a logical shift, not an arithmetic one,
11275
                 as we want the top N bits of C to be zero.  */
11276
              unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
11277
 
11278
              temp >>= INTVAL (XEXP (op0, 1));
11279
              op1 = gen_int_mode (temp, mode);
11280
              op0 = XEXP (op0, 0);
11281
              continue;
11282
            }
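          /* For example, (compare (ashift X 2) (const_int 12)) can become
             (compare X (const_int 3)) when the top bits of X are known to
             be zero, since the shift then loses nothing and 12 has its low
             two bits clear.  */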
11283
 
11284
          /* If we are doing a sign bit comparison, it means we are testing
11285
             a particular bit.  Convert it to the appropriate AND.  */
11286
          if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
11287
              && mode_width <= HOST_BITS_PER_WIDE_INT)
11288
            {
11289
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11290
                                            ((HOST_WIDE_INT) 1
11291
                                             << (mode_width - 1
11292
                                                 - INTVAL (XEXP (op0, 1)))));
11293
              code = (code == LT ? NE : EQ);
11294
              continue;
11295
            }
11296
 
11297
          /* If this is an equality comparison with zero and we are shifting
11298
             the low bit to the sign bit, we can convert this to an AND of the
11299
             low-order bit.  */
11300
          if (const_op == 0 && equality_comparison_p
11301
              && CONST_INT_P (XEXP (op0, 1))
11302
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11303
                 == mode_width - 1)
11304
            {
11305
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11306
                                            (HOST_WIDE_INT) 1);
11307
              continue;
11308
            }
11309
          break;
11310
 
11311
        case ASHIFTRT:
11312
          /* If this is an equality comparison with zero, we can do this
11313
             as a logical shift, which might be much simpler.  */
11314
          if (equality_comparison_p && const_op == 0
11315
              && CONST_INT_P (XEXP (op0, 1)))
11316
            {
11317
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11318
                                          XEXP (op0, 0),
11319
                                          INTVAL (XEXP (op0, 1)));
11320
              continue;
11321
            }
11322
 
11323
          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11324
             do the comparison in a narrower mode.  */
11325
          if (! unsigned_comparison_p
11326
              && CONST_INT_P (XEXP (op0, 1))
11327
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
11328
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11329
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11330
                                         MODE_INT, 1)) != BLKmode
11331
              && (((unsigned HOST_WIDE_INT) const_op
11332
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
11333
                  <= GET_MODE_MASK (tmode)))
11334
            {
11335
              op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
11336
              continue;
11337
            }
11338
 
11339
          /* Likewise if OP0 is a PLUS of a sign extension with a
11340
             constant, which is usually represented with the PLUS
11341
             between the shifts.  */
11342
          if (! unsigned_comparison_p
11343
              && CONST_INT_P (XEXP (op0, 1))
11344
              && GET_CODE (XEXP (op0, 0)) == PLUS
11345
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
11346
              && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11347
              && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11348
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11349
                                         MODE_INT, 1)) != BLKmode
11350
              && (((unsigned HOST_WIDE_INT) const_op
11351
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
11352
                  <= GET_MODE_MASK (tmode)))
11353
            {
11354
              rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11355
              rtx add_const = XEXP (XEXP (op0, 0), 1);
11356
              rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
11357
                                                   add_const, XEXP (op0, 1));
11358
 
11359
              op0 = simplify_gen_binary (PLUS, tmode,
11360
                                         gen_lowpart (tmode, inner),
11361
                                         new_const);
11362
              continue;
11363
            }
11364
 
11365
          /* ... fall through ...  */
11366
        case LSHIFTRT:
11367
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
11368
             the low order N bits of FOO are known to be zero, we can do this
11369
             by comparing FOO with C shifted left N bits so long as no
11370
             overflow occurs.  */
11371
          if (CONST_INT_P (XEXP (op0, 1))
11372
              && INTVAL (XEXP (op0, 1)) >= 0
11373
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11374
              && mode_width <= HOST_BITS_PER_WIDE_INT
11375
              && (nonzero_bits (XEXP (op0, 0), mode)
11376
                  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
11377
              && (((unsigned HOST_WIDE_INT) const_op
11378
                   + (GET_CODE (op0) != LSHIFTRT
11379
                      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11380
                         + 1)
11381
                      : 0))
11382
                  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
11383
            {
11384
              /* If the shift was logical, then we must make the condition
11385
                 unsigned.  */
11386
              if (GET_CODE (op0) == LSHIFTRT)
11387
                code = unsigned_condition (code);
11388
 
11389
              const_op <<= INTVAL (XEXP (op0, 1));
11390
              op1 = GEN_INT (const_op);
11391
              op0 = XEXP (op0, 0);
11392
              continue;
11393
            }
11394
 
11395
          /* If we are using this shift to extract just the sign bit, we
11396
             can replace this with an LT or GE comparison.  */
11397
          if (const_op == 0
11398
              && (equality_comparison_p || sign_bit_comparison_p)
11399
              && CONST_INT_P (XEXP (op0, 1))
11400
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11401
                 == mode_width - 1)
11402
            {
11403
              op0 = XEXP (op0, 0);
11404
              code = (code == NE || code == GT ? LT : GE);
11405
              continue;
11406
            }
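          /* For example, in a 32-bit mode (lshiftrt X 31) isolates the
             sign bit of X, so comparing it against zero with EQ yields
             (ge X 0) and with NE yields (lt X 0).  */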
11407
          break;
11408
 
11409
        default:
11410
          break;
11411
        }
11412
 
11413
      break;
11414
    }
11415
 
11416
  /* Now make any compound operations involved in this comparison.  Then,
11417
     check for an outermost SUBREG on OP0 that is not doing anything or is
11418
     paradoxical.  The latter transformation must only be performed when
11419
     it is known that the "extra" bits will be the same in op0 and op1 or
11420
     that they don't matter.  There are three cases to consider:
11421
 
11422
     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
11423
     care bits and we can assume they have any convenient value.  So
11424
     making the transformation is safe.
11425
 
11426
     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11427
     In this case the upper bits of op0 are undefined.  We should not make
11428
     the simplification in that case as we do not know the contents of
11429
     those bits.
11430
 
11431
     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
11432
     UNKNOWN.  In that case we know those bits are zeros or ones.  We must
11433
     also be sure that they are the same as the upper bits of op1.
11434
 
11435
     We can never remove a SUBREG for a non-equality comparison because
11436
     the sign bit is in a different place in the underlying object.  */
11437
 
11438
  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11439
  op1 = make_compound_operation (op1, SET);
11440
 
11441
  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11442
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
11443
      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
11444
      && (code == NE || code == EQ))
11445
    {
11446
      if (GET_MODE_SIZE (GET_MODE (op0))
11447
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11448
        {
11449
          /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
11450
             implemented.  */
11451
          if (REG_P (SUBREG_REG (op0)))
11452
            {
11453
              op0 = SUBREG_REG (op0);
11454
              op1 = gen_lowpart (GET_MODE (op0), op1);
11455
            }
11456
        }
11457
      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11458
                <= HOST_BITS_PER_WIDE_INT)
11459
               && (nonzero_bits (SUBREG_REG (op0),
11460
                                 GET_MODE (SUBREG_REG (op0)))
11461
                   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11462
        {
11463
          tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
11464
 
11465
          if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11466
               & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11467
            op0 = SUBREG_REG (op0), op1 = tem;
11468
        }
11469
    }
11470
 
11471
  /* We now do the opposite procedure: Some machines don't have compare
11472
     insns in all modes.  If OP0's mode is an integer mode smaller than a
11473
     word and we can't do a compare in that mode, see if there is a larger
11474
     mode for which we can do the compare.  There are a number of cases in
11475
     which we can use the wider mode.  */
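  /* For example, an equality or unsigned QImode test can be widened to
     SImode by zero-extending both operands when their nonzero bits fit in
     QImode; if instead both operands are known sign-extended, any
     comparison code can be widened with SIGN_EXTEND.  */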
11476
 
11477
  mode = GET_MODE (op0);
11478
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11479
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
11480
      && ! have_insn_for (COMPARE, mode))
11481
    for (tmode = GET_MODE_WIDER_MODE (mode);
11482
         (tmode != VOIDmode
11483
          && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
11484
         tmode = GET_MODE_WIDER_MODE (tmode))
11485
      if (have_insn_for (COMPARE, tmode))
11486
        {
11487
          int zero_extended;
11488
 
11489
          /* If this is a test for negative, we can make an explicit
11490
             test of the sign bit.  Test this first so we can use
11491
             a paradoxical subreg to extend OP0.  */
11492
 
11493
          if (op1 == const0_rtx && (code == LT || code == GE)
11494
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11495
            {
11496
              op0 = simplify_gen_binary (AND, tmode,
11497
                                         gen_lowpart (tmode, op0),
11498
                                         GEN_INT ((HOST_WIDE_INT) 1
11499
                                                  << (GET_MODE_BITSIZE (mode)
11500
                                                      - 1)));
11501
              code = (code == LT) ? NE : EQ;
11502
              break;
11503
            }
11504
 
11505
          /* If the only nonzero bits in OP0 and OP1 are those in the
11506
             narrower mode and this is an equality or unsigned comparison,
11507
             we can use the wider mode.  Similarly for sign-extended
11508
             values, in which case it is true for all comparisons.  */
11509
          zero_extended = ((code == EQ || code == NE
11510
                            || code == GEU || code == GTU
11511
                            || code == LEU || code == LTU)
11512
                           && (nonzero_bits (op0, tmode)
11513
                               & ~GET_MODE_MASK (mode)) == 0
11514
                           && ((CONST_INT_P (op1)
11515
                                || (nonzero_bits (op1, tmode)
11516
                                    & ~GET_MODE_MASK (mode)) == 0)));
11517
 
11518
          if (zero_extended
11519
              || ((num_sign_bit_copies (op0, tmode)
11520
                   > (unsigned int) (GET_MODE_BITSIZE (tmode)
11521
                                     - GET_MODE_BITSIZE (mode)))
11522
                  && (num_sign_bit_copies (op1, tmode)
11523
                      > (unsigned int) (GET_MODE_BITSIZE (tmode)
11524
                                        - GET_MODE_BITSIZE (mode)))))
11525
            {
11526
              /* If OP0 is an AND and we don't have an AND in MODE either,
11527
                 make a new AND in the proper mode.  */
11528
              if (GET_CODE (op0) == AND
11529
                  && !have_insn_for (AND, mode))
11530
                op0 = simplify_gen_binary (AND, tmode,
11531
                                           gen_lowpart (tmode,
11532
                                                        XEXP (op0, 0)),
11533
                                           gen_lowpart (tmode,
11534
                                                        XEXP (op0, 1)));
11535
              else
11536
                {
11537
                  if (zero_extended)
11538
                    {
11539
                      op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
11540
                      op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
11541
                    }
11542
                  else
11543
                    {
11544
                      op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
11545
                      op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
11546
                    }
11547
                  break;
11548
                }
11549
            }
11550
        }
11551
 
11552
#ifdef CANONICALIZE_COMPARISON
11553
  /* If this machine only supports a subset of valid comparisons, see if we
11554
     can convert an unsupported one into a supported one.  */
11555
  CANONICALIZE_COMPARISON (code, op0, op1);
11556
#endif
11557
 
11558
  *pop0 = op0;
11559
  *pop1 = op1;
11560
 
11561
  return code;
11562
}
11563
 
11564
/* Utility function for record_value_for_reg.  Count number of
11565
   rtxs in X.  */
11566
static int
11567
count_rtxs (rtx x)
11568
{
11569
  enum rtx_code code = GET_CODE (x);
11570
  const char *fmt;
11571
  int i, j, ret = 1;
11572
 
11573
  if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
11574
      || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
11575
    {
11576
      rtx x0 = XEXP (x, 0);
11577
      rtx x1 = XEXP (x, 1);
11578
 
11579
      if (x0 == x1)
11580
        return 1 + 2 * count_rtxs (x0);
11581
 
11582
      if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
11583
           || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
11584
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11585
        return 2 + 2 * count_rtxs (x0)
11586
               + count_rtxs (x0 == XEXP (x1, 0)
11587
                             ? XEXP (x1, 1) : XEXP (x1, 0));
11588
 
11589
      if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
11590
           || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
11591
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11592
        return 2 + 2 * count_rtxs (x1)
11593
               + count_rtxs (x1 == XEXP (x0, 0)
11594
                             ? XEXP (x0, 1) : XEXP (x0, 0));
11595
    }
11596
 
11597
  fmt = GET_RTX_FORMAT (code);
11598
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11599
    if (fmt[i] == 'e')
11600
      ret += count_rtxs (XEXP (x, i));
11601
    else if (fmt[i] == 'E')
11602
      for (j = 0; j < XVECLEN (x, i); j++)
11603
        ret += count_rtxs (XVECEXP (x, i, j));
11604
 
11605
  return ret;
11606
}
11607
 
11608
/* Utility function for the following routine.  Called when X is part of a
11609
   value being stored into last_set_value.  Sets last_set_table_tick
11610
   for each register mentioned.  Similar to mention_regs in cse.c.  */
11611
 
11612
static void
11613
update_table_tick (rtx x)
11614
{
11615
  enum rtx_code code = GET_CODE (x);
11616
  const char *fmt = GET_RTX_FORMAT (code);
11617
  int i, j;
11618
 
11619
  if (code == REG)
11620
    {
11621
      unsigned int regno = REGNO (x);
11622
      unsigned int endregno = END_REGNO (x);
11623
      unsigned int r;
11624
 
11625
      for (r = regno; r < endregno; r++)
11626
        {
11627
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
11628
          rsp->last_set_table_tick = label_tick;
11629
        }
11630
 
11631
      return;
11632
    }
11633
 
11634
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11635
    if (fmt[i] == 'e')
11636
      {
11637
        /* Check for identical subexpressions.  If x contains
11638
           identical subexpressions we only have to traverse one of
11639
           them.  */
11640
        if (i == 0 && ARITHMETIC_P (x))
11641
          {
11642
            /* Note that at this point x1 has already been
11643
               processed.  */
11644
            rtx x0 = XEXP (x, 0);
11645
            rtx x1 = XEXP (x, 1);
11646
 
11647
            /* If x0 and x1 are identical then there is no need to
11648
               process x0.  */
11649
            if (x0 == x1)
11650
              break;
11651
 
11652
            /* If x0 is identical to a subexpression of x1 then while
11653
               processing x1, x0 has already been processed.  Thus we
11654
               are done with x.  */
11655
            if (ARITHMETIC_P (x1)
11656
                && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11657
              break;
11658
 
11659
            /* If x1 is identical to a subexpression of x0 then we
11660
               still have to process the rest of x0.  */
11661
            if (ARITHMETIC_P (x0)
11662
                && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11663
              {
11664
                update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11665
                break;
11666
              }
11667
          }
11668
 
11669
        update_table_tick (XEXP (x, i));
11670
      }
11671
    else if (fmt[i] == 'E')
11672
      for (j = 0; j < XVECLEN (x, i); j++)
11673
        update_table_tick (XVECEXP (x, i, j));
11674
}
11675
 
11676
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
11677
   are saying that the register is clobbered and we no longer know its
11678
   value.  If INSN is zero, don't update reg_stat[].last_set; this is
11679
   only permitted with VALUE also zero and is used to invalidate the
11680
   register.  */
11681
 
11682
static void
11683
record_value_for_reg (rtx reg, rtx insn, rtx value)
11684
{
11685
  unsigned int regno = REGNO (reg);
11686
  unsigned int endregno = END_REGNO (reg);
11687
  unsigned int i;
11688
  reg_stat_type *rsp;
11689
 
11690
  /* If VALUE contains REG and we have a previous value for REG, substitute
11691
     the previous value.  */
11692
  if (value && insn && reg_overlap_mentioned_p (reg, value))
11693
    {
11694
      rtx tem;
11695
 
11696
      /* Set things up so get_last_value is allowed to see anything set up to
11697
         our insn.  */
11698
      subst_low_luid = DF_INSN_LUID (insn);
11699
      tem = get_last_value (reg);
11700
 
11701
      /* If TEM is simply a binary operation with two CLOBBERs as operands,
11702
         it isn't going to be useful and will take a lot of time to process,
11703
         so just use the CLOBBER.  */
11704
 
11705
      if (tem)
11706
        {
11707
          if (ARITHMETIC_P (tem)
11708
              && GET_CODE (XEXP (tem, 0)) == CLOBBER
11709
              && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11710
            tem = XEXP (tem, 0);
11711
          else if (count_occurrences (value, reg, 1) >= 2)
11712
            {
11713
              /* If there are two or more occurrences of REG in VALUE,
11714
                 prevent the value from growing too much.  */
11715
              if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11716
                tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11717
            }
11718
 
11719
          value = replace_rtx (copy_rtx (value), reg, tem);
11720
        }
11721
    }
11722
 
11723
  /* For each register modified, show we don't know its value, that
11724
     we don't know about its bitwise content, that its value has been
11725
     updated, and that we don't know the location of the death of the
11726
     register.  */
11727
  for (i = regno; i < endregno; i++)
11728
    {
11729
      rsp = VEC_index (reg_stat_type, reg_stat, i);
11730
 
11731
      if (insn)
11732
        rsp->last_set = insn;
11733
 
11734
      rsp->last_set_value = 0;
11735
      rsp->last_set_mode = VOIDmode;
11736
      rsp->last_set_nonzero_bits = 0;
11737
      rsp->last_set_sign_bit_copies = 0;
11738
      rsp->last_death = 0;
11739
      rsp->truncated_to_mode = VOIDmode;
11740
    }
11741
 
11742
  /* Mark registers that are being referenced in this value.  */
11743
  if (value)
11744
    update_table_tick (value);
11745
 
11746
  /* Now update the status of each register being set.
11747
     If someone is using this register in this block, set this register
11748
     to invalid since we will get confused between the two lives in this
11749
     basic block.  This makes using this register always invalid.  In cse, we
11750
     scan the table to invalidate all entries using this register, but this
11751
     is too much work for us.  */
11752
 
11753
  for (i = regno; i < endregno; i++)
11754
    {
11755
      rsp = VEC_index (reg_stat_type, reg_stat, i);
11756
      rsp->last_set_label = label_tick;
11757
      if (!insn
11758
          || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
11759
        rsp->last_set_invalid = 1;
11760
      else
11761
        rsp->last_set_invalid = 0;
11762
    }
11763
 
11764
  /* The value being assigned might refer to X (like in "x++;").  In that
11765
     case, we must replace it with (clobber (const_int 0)) to prevent
11766
     infinite loops.  */
11767
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
11768
  if (value && !get_last_value_validate (&value, insn, label_tick, 0))
11769
    {
11770
      value = copy_rtx (value);
11771
      if (!get_last_value_validate (&value, insn, label_tick, 1))
11772
        value = 0;
11773
    }
11774
 
11775
  /* For the main register being modified, update the value, the mode, the
11776
     nonzero bits, and the number of sign bit copies.  */
11777
 
11778
  rsp->last_set_value = value;
11779
 
11780
  if (value)
11781
    {
11782
      enum machine_mode mode = GET_MODE (reg);
11783
      subst_low_luid = DF_INSN_LUID (insn);
11784
      rsp->last_set_mode = mode;
11785
      if (GET_MODE_CLASS (mode) == MODE_INT
11786
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11787
        mode = nonzero_bits_mode;
11788
      rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
11789
      rsp->last_set_sign_bit_copies
11790
        = num_sign_bit_copies (value, GET_MODE (reg));
11791
    }
11792
}
11793
 
11794
/* Called via note_stores from record_dead_and_set_regs to handle one
11795
   SET or CLOBBER in an insn.  DATA is the instruction in which the
11796
   set is occurring.  */
11797
 
11798
static void
11799
record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
11800
{
11801
  rtx record_dead_insn = (rtx) data;
11802
 
11803
  if (GET_CODE (dest) == SUBREG)
11804
    dest = SUBREG_REG (dest);
11805
 
11806
  if (!record_dead_insn)
11807
    {
11808
      if (REG_P (dest))
11809
        record_value_for_reg (dest, NULL_RTX, NULL_RTX);
11810
      return;
11811
    }
11812
 
11813
  if (REG_P (dest))
11814
    {
11815
      /* If we are setting the whole register, we know its value.  Otherwise
11816
         show that we don't know the value.  We can handle SUBREG in
11817
         some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
               && GET_CODE (SET_DEST (setter)) == SUBREG
               && SUBREG_REG (SET_DEST (setter)) == dest
               && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
               && subreg_lowpart_p (SET_DEST (setter)))
        record_value_for_reg (dest, record_dead_insn,
                              gen_lowpart (GET_MODE (dest),
                                           SET_SRC (setter)));
      else
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (MEM_P (dest)
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = DF_INSN_LUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_stat[], in particular fields last_set, last_set_value,
   last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
   last_death, and also the similar information mem_last_set (which insn
   most recently modified memory) and last_call_luid (which insn was the
   most recent subroutine call).  */

static void
record_dead_and_set_regs (rtx insn)
{
  rtx link;
  unsigned int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
          && REG_P (XEXP (link, 0)))
        {
          unsigned int regno = REGNO (XEXP (link, 0));
          unsigned int endregno = END_REGNO (XEXP (link, 0));

          for (i = regno; i < endregno; i++)
            {
              reg_stat_type *rsp;

              rsp = VEC_index (reg_stat_type, reg_stat, i);
              rsp->last_death = insn;
            }
        }
      else if (REG_NOTE_KIND (link) == REG_INC)
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          {
            reg_stat_type *rsp;

            rsp = VEC_index (reg_stat_type, reg_stat, i);
            rsp->last_set_invalid = 1;
            rsp->last_set = insn;
            rsp->last_set_value = 0;
            rsp->last_set_mode = VOIDmode;
            rsp->last_set_nonzero_bits = 0;
            rsp->last_set_sign_bit_copies = 0;
            rsp->last_death = 0;
            rsp->truncated_to_mode = VOIDmode;
          }

      last_call_luid = mem_last_set = DF_INSN_LUID (insn);

      /* We can't combine into a call pattern.  Remember, though, that
         the return value register is set at this LUID.  We could
         still replace a register with the return value from the
         wrong subroutine call!  */
      note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
    }
  else
    note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
}

/* If a SUBREG has the promoted bit set, it is in fact a property of the
   register present in the SUBREG, so for each such SUBREG go back and
   adjust nonzero and sign bit information of the registers that are
   known to have some zero/sign bits set.

   This is needed because when combine blows the SUBREGs away, the
   information on zero/sign bits is lost and further combines can be
   missed because of that.  */
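/* For example, on a target that promotes QImode variables to SImode, an
   unsigned char may live in (subreg:QI (reg:SI 100) 0) with the
   promoted-unsigned flag set (the offset shown is for a little-endian
   target).  The promotion guarantees that the upper 24 bits of register
   100 are zero, so its last_set_nonzero_bits can be masked down to
   GET_MODE_MASK (QImode), i.e. 0xff.  (Register 100 is illustrative
   only.)  */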
11911
 
11912
static void
11913
record_promoted_value (rtx insn, rtx subreg)
11914
{
11915
  rtx links, set;
11916
  unsigned int regno = REGNO (SUBREG_REG (subreg));
11917
  enum machine_mode mode = GET_MODE (subreg);
11918
 
11919
  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
11920
    return;
11921
 
11922
  for (links = LOG_LINKS (insn); links;)
11923
    {
11924
      reg_stat_type *rsp;
11925
 
11926
      insn = XEXP (links, 0);
11927
      set = single_set (insn);
11928
 
11929
      if (! set || !REG_P (SET_DEST (set))
11930
          || REGNO (SET_DEST (set)) != regno
11931
          || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11932
        {
11933
          links = XEXP (links, 1);
11934
          continue;
11935
        }
11936
 
11937
      rsp = VEC_index (reg_stat_type, reg_stat, regno);
11938
      if (rsp->last_set == insn)
11939
        {
11940
          if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
11941
            rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
11942
        }
11943
 
11944
      if (REG_P (SET_SRC (set)))
11945
        {
11946
          regno = REGNO (SET_SRC (set));
11947
          links = LOG_LINKS (insn);
11948
        }
11949
      else
11950
        break;
11951
    }
11952
}
11953
 
11954
/* Check if X, a register, is known to contain a value already
11955
   truncated to MODE.  In this case we can use a subreg to refer to
11956
   the truncated value even though in the generic case we would need
11957
   an explicit truncation.  */
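/* For example, on a target such as MIPS64, where truncating DImode to
   SImode is not a no-op, (truncate:SI (reg:DI 100)) normally needs a
   real instruction; but if register 100 is already known to hold a
   value truncated to SImode, the cheaper lowpart subreg can be used
   instead.  (The target, register number and mode pair are
   illustrative only.)  */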
static bool
reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
{
  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  enum machine_mode truncated = rsp->truncated_to_mode;

  if (truncated == 0
      || rsp->truncation_label < label_tick_ebb_start)
    return false;
  if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
    return true;
  if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                             GET_MODE_BITSIZE (truncated)))
    return true;
  return false;
}

/* Callback for for_each_rtx.  If *P is a hard reg or a subreg, record the
   mode that the register is accessed in.  For non-TRULY_NOOP_TRUNCATION
   targets we might be able to turn a truncate into a subreg using this
   information.  Return -1 if traversing *P is complete or 0 otherwise.  */

static int
record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *p;
  enum machine_mode truncated_mode;
  reg_stat_type *rsp;

  if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
    {
      enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
      truncated_mode = GET_MODE (x);

      if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
        return -1;

      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
                                 GET_MODE_BITSIZE (original_mode)))
        return -1;

      x = SUBREG_REG (x);
    }
  /* ??? For hard-regs we now record everything.  We might be able to
     optimize this using last_set_mode.  */
  else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    truncated_mode = GET_MODE (x);
  else
    return 0;

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->truncated_to_mode == 0
      || rsp->truncation_label < label_tick_ebb_start
      || (GET_MODE_SIZE (truncated_mode)
          < GET_MODE_SIZE (rsp->truncated_to_mode)))
    {
      rsp->truncated_to_mode = truncated_mode;
      rsp->truncation_label = label_tick;
    }

  return -1;
}

/* Callback for note_uses.  Find hardregs and subregs of pseudos and
   the modes they are used in.  This can help turn TRUNCATEs into
   SUBREGs.  */

static void
record_truncated_values (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  for_each_rtx (x, record_truncated_value, NULL);
}

/* Scan X for promoted SUBREGs.  For each one found,
   note what it implies about the registers used in it.  */

static void
check_promoted_subreg (rtx insn, rtx x)
{
  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && REG_P (SUBREG_REG (x)))
    record_promoted_value (insn, x);
  else
    {
      const char *format = GET_RTX_FORMAT (GET_CODE (x));
      int i, j;

      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        switch (format[i])
          {
          case 'e':
            check_promoted_subreg (insn, XEXP (x, i));
            break;
          case 'V':
          case 'E':
            if (XVEC (x, i) != 0)
              for (j = 0; j < XVECLEN (x, i); j++)
                check_promoted_subreg (insn, XVECEXP (x, i, j));
            break;
          }
    }
}

/* Verify that all the registers and memory references mentioned in *LOC are
   still valid.  *LOC was part of a value set in INSN when label_tick was
   equal to TICK.  Return 0 if some are not.  If REPLACE is nonzero, replace
   the invalid references with (clobber (const_int 0)) and return 1.  This
   replacement is useful because we can often get useful information about
   the form of a value (e.g., if it was produced by a shift that always
   produces -1 or 0) even though we don't know exactly what registers it
   was produced from.  */
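/* For example, if the recorded value was
   (ashiftrt:SI (reg:SI 100) (const_int 31)) and register 100 has since
   been set again, validating with REPLACE nonzero rewrites the value to
   (ashiftrt:SI (clobber (const_int 0)) (const_int 31)): the operand is
   gone, but the expression still shows that the result is either -1 or
   0.  (Register 100 is illustrative only.)  */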
static int
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i, j;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int j;

      for (j = regno; j < endregno; j++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
          if (rsp->last_set_invalid
              /* If this is a pseudo-register that was only set once and not
                 live at the beginning of the function, it is always valid.  */
              || (! (regno >= FIRST_PSEUDO_REGISTER
                     && REG_N_SETS (regno) == 1
                     && (!REGNO_REG_SET_P
                         (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
                  && rsp->last_set_label > tick))
          {
            if (replace)
              *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
            return replace;
          }
        }

      return 1;
    }
  /* If this is a memory reference, make sure that there were no stores after
     it that might have clobbered the value.  We don't have alias info, so we
     assume any store invalidates it.  Moreover, we only have local UIDs, so
     we also assume that there were stores in the intervening basic blocks.  */
  else if (MEM_P (x) && !MEM_READONLY_P (x)
           && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
    {
      if (replace)
        *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          /* Check for identical subexpressions.  If x contains
             identical subexpressions, we only have to traverse one of
             them.  */
          if (i == 1 && ARITHMETIC_P (x))
            {
              /* Note that at this point x0 has already been checked
                 and found valid.  */
              rtx x0 = XEXP (x, 0);
              rtx x1 = XEXP (x, 1);

              /* If x0 and x1 are identical then x is also valid.  */
              if (x0 == x1)
                return 1;

              /* If x1 is identical to a subexpression of x0 then
                 while checking x0, x1 has already been checked.  Thus
                 it is valid and so is x.  */
              if (ARITHMETIC_P (x0)
                  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
                return 1;

              /* If x0 is identical to a subexpression of x1 then x is
                 valid iff the rest of x1 is valid.  */
              if (ARITHMETIC_P (x1)
                  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
                return
                  get_last_value_validate (&XEXP (x1,
                                                  x0 == XEXP (x1, 0) ? 1 : 0),
                                           insn, tick, replace);
            }

          if (get_last_value_validate (&XEXP (x, i), insn, tick,
                                       replace) == 0)
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (get_last_value_validate (&XVECEXP (x, i, j),
                                       insn, tick, replace) == 0)
            return 0;
    }

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (const_rtx x)
{
  unsigned int regno;
  rtx value;
  reg_stat_type *rsp;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart (GET_MODE (x), value);

  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
  value = rsp->last_set_value;

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or live at the
     beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (rsp->last_set_label < label_tick_ebb_start
          && (regno < FIRST_PSEUDO_REGISTER
              || REG_N_SETS (regno) != 1
              || REGNO_REG_SET_P
                 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (rsp->last_set_label == label_tick
      && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
    return value;

  return 0;
}

/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_LUID.  */
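/* For example, given

       insn 10:  set r100 = r101 + r102
       insn 11:  set r101 = ...
       insn 12:  set r103 = r100

   substituting r101 + r102 from insn 10 into insn 12 would read the
   new value of r101 set at insn 11; the use "crosses" that set, so the
   combination must be rejected.  (Insn and register numbers are
   illustrative only.)  */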
12236
 
12237
static int
12238
use_crosses_set_p (const_rtx x, int from_luid)
12239
{
12240
  const char *fmt;
12241
  int i;
12242
  enum rtx_code code = GET_CODE (x);
12243
 
12244
  if (code == REG)
12245
    {
12246
      unsigned int regno = REGNO (x);
12247
      unsigned endreg = END_REGNO (x);
12248
 
12249
#ifdef PUSH_ROUNDING
12250
      /* Don't allow uses of the stack pointer to be moved,
12251
         because we don't know whether the move crosses a push insn.  */
12252
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
12253
        return 1;
12254
#endif
12255
      for (; regno < endreg; regno++)
12256
        {
12257
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
12258
          if (rsp->last_set
12259
              && rsp->last_set_label == label_tick
12260
              && DF_INSN_LUID (rsp->last_set) > from_luid)
12261
            return 1;
12262
        }
12263
      return 0;
12264
    }
12265
 
12266
  if (code == MEM && mem_last_set > from_luid)
12267
    return 1;
12268
 
12269
  fmt = GET_RTX_FORMAT (code);
12270
 
12271
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12272
    {
12273
      if (fmt[i] == 'E')
12274
        {
12275
          int j;
12276
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12277
            if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
12278
              return 1;
12279
        }
12280
      else if (fmt[i] == 'e'
12281
               && use_crosses_set_p (XEXP (x, i), from_luid))
12282
        return 1;
12283
    }
12284
  return 0;
12285
}
12286
 
12287
/* Define three variables used for communication between the following
12288
   routines.  */
12289
 
12290
static unsigned int reg_dead_regno, reg_dead_endregno;
12291
static int reg_dead_flag;
12292
 
12293
/* Function called via note_stores from reg_dead_at_p.
12294
 
12295
   If DEST is within [reg_dead_regno, reg_dead_endregno), set
12296
   reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET.  */
12297
 
12298
static void
12299
reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
12300
{
12301
  unsigned int regno, endregno;
12302
 
12303
  if (!REG_P (dest))
12304
    return;
12305
 
12306
  regno = REGNO (dest);
12307
  endregno = END_REGNO (dest);
12308
  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12309
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12310
}
12311
 
12312
/* Return nonzero if REG is known to be dead at INSN.
12313
 
12314
   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
12315
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
12316
   live.  Otherwise, see if it is live or dead at the start of the basic
12317
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
12318
   must be assumed to be always live.  */
12319
 
12320
static int
12321
reg_dead_at_p (rtx reg, rtx insn)
12322
{
12323
  basic_block block;
12324
  unsigned int i;
12325
 
12326
  /* Set variables for reg_dead_at_p_1.  */
12327
  reg_dead_regno = REGNO (reg);
12328
  reg_dead_endregno = END_REGNO (reg);
12329
 
12330
  reg_dead_flag = 0;
12331
 
12332
  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
12333
     we allow the machine description to decide whether use-and-clobber
12334
     patterns are OK.  */
12335
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12336
    {
12337
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12338
        if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
12339
          return 0;
12340
    }
12341
 
12342
  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
12343
     beginning of basic block.  */
12344
  block = BLOCK_FOR_INSN (insn);
12345
  for (;;)
12346
    {
12347
      if (INSN_P (insn))
12348
        {
12349
          note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12350
          if (reg_dead_flag)
12351
            return reg_dead_flag == 1 ? 1 : 0;
12352
 
12353
          if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12354
            return 1;
12355
        }
12356
 
12357
      if (insn == BB_HEAD (block))
12358
        break;
12359
 
12360
      insn = PREV_INSN (insn);
12361
    }
12362
 
12363
  /* Look at live-in sets for the basic block that we were in.  */
12364
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
12365
    if (REGNO_REG_SET_P (df_get_live_in (block), i))
12366
      return 0;
12367
 
12368
  return 1;
12369
}
/* Note hard registers in X that are used.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (MEM_P (XEXP (x, 0)))
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (MEM_P (testreg))
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}

/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    remove_note (insn, note);

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with luid between FROM_LUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */
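/* For example, if I2 was "set r100 = r101 + 1" and carried a REG_DEAD
   note for r101, then after I2 is merged into I3 the last use of r101
   appears in I3; the REG_DEAD note must be collected on PNOTES so it
   can be re-placed at the new location.  (Insn and register numbers
   are illustrative only.)  */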
12493
 
12494
static void
12495
move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
12496
             rtx *pnotes)
12497
{
12498
  const char *fmt;
12499
  int len, i;
12500
  enum rtx_code code = GET_CODE (x);
12501
 
12502
  if (code == REG)
12503
    {
12504
      unsigned int regno = REGNO (x);
12505
      rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;
12506
 
12507
      /* Don't move the register if it gets killed in between from and to.  */
12508
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
12509
          && ! reg_referenced_p (x, maybe_kill_insn))
12510
        return;
12511
 
12512
      if (where_dead
12513
          && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
12514
          && DF_INSN_LUID (where_dead) >= from_luid
12515
          && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
12516
        {
12517
          rtx note = remove_death (regno, where_dead);
12518
 
12519
          /* It is possible for the call above to return 0.  This can occur
12520
             when last_death points to I2 or I1 that we combined with.
12521
             In that case make a new note.
12522
 
12523
             We must also check for the case where X is a hard register
12524
             and NOTE is a death note for a range of hard registers
12525
             including X.  In that case, we must put REG_DEAD notes for
12526
             the remaining registers in place of NOTE.  */
12527
 
12528
          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12529
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12530
                  > GET_MODE_SIZE (GET_MODE (x))))
12531
            {
12532
              unsigned int deadregno = REGNO (XEXP (note, 0));
12533
              unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
12534
              unsigned int ourend = END_HARD_REGNO (x);
12535
              unsigned int i;
12536
 
12537
              for (i = deadregno; i < deadend; i++)
12538
                if (i < regno || i >= ourend)
12539
                  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
12540
            }
12541
 
12542
          /* If we didn't find any note, or if we found a REG_DEAD note that
12543
             covers only part of the given reg, and we have a multi-reg hard
12544
             register, then to be safe we must check for REG_DEAD notes
12545
             for each register other than the first.  They could have
12546
             their own REG_DEAD notes lying around.  */
12547
          else if ((note == 0
12548
                    || (note != 0
12549
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12550
                            < GET_MODE_SIZE (GET_MODE (x)))))
12551
                   && regno < FIRST_PSEUDO_REGISTER
12552
                   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
12553
            {
12554
              unsigned int ourend = END_HARD_REGNO (x);
12555
              unsigned int i, offset;
12556
              rtx oldnotes = 0;
12557
 
12558
              if (note)
12559
                offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
12560
              else
12561
                offset = 1;
12562
 
12563
              for (i = regno + offset; i < ourend; i++)
12564
                move_deaths (regno_reg_rtx[i],
12565
                             maybe_kill_insn, from_luid, to_insn, &oldnotes);
12566
            }
12567
 
12568
          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
12569
            {
12570
              XEXP (note, 1) = *pnotes;
12571
              *pnotes = note;
12572
            }
12573
          else
12574
            *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
12575
        }
12576
 
12577
      return;
12578
    }
12579
 
12580
  else if (GET_CODE (x) == SET)
12581
    {
12582
      rtx dest = SET_DEST (x);
12583
 
12584
      move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
12585
 
12586
      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12587
         that accesses one word of a multi-word item, some
12588
         piece of everything register in the expression is used by
12589
         this insn, so remove any old death.  */
12590
      /* ??? So why do we test for equality of the sizes?  */
12591
 
12592
      if (GET_CODE (dest) == ZERO_EXTRACT
12593
          || GET_CODE (dest) == STRICT_LOW_PART
12594
          || (GET_CODE (dest) == SUBREG
12595
              && (((GET_MODE_SIZE (GET_MODE (dest))
12596
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12597
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12598
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
12599
        {
12600
          move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
12601
          return;
12602
        }
12603
 
12604
      /* If this is some other SUBREG, we know it replaces the entire
12605
         value, so use that as the destination.  */
12606
      if (GET_CODE (dest) == SUBREG)
12607
        dest = SUBREG_REG (dest);
12608
 
12609
      /* If this is a MEM, adjust deaths of anything used in the address.
12610
         For a REG (the only other possibility), the entire value is
12611
         being replaced so the old value is not used in this insn.  */
12612
 
12613
      if (MEM_P (dest))
12614
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
12615
                     to_insn, pnotes);
12616
      return;
12617
    }
12618
 
12619
  else if (GET_CODE (x) == CLOBBER)
12620
    return;
12621
 
12622
  len = GET_RTX_LENGTH (code);
12623
  fmt = GET_RTX_FORMAT (code);
12624
 
12625
  for (i = 0; i < len; i++)
12626
    {
12627
      if (fmt[i] == 'E')
12628
        {
12629
          int j;
12630
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
12631
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
12632
                         to_insn, pnotes);
12633
        }
12634
      else if (fmt[i] == 'e')
12635
        move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
12636
    }
12637
}
12638
 
12639
/* Return 1 if X is the target of a bit-field assignment in BODY, the
12640
   pattern of an insn.  X must be a REG.  */
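/* For example, with BODY
   (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 0))
        (reg:SI 101))
   this returns 1 for register 100: only some of its bits are written,
   so the rest of the register remains live.  (Register numbers and the
   field position are illustrative only.)  */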
12641
 
12642
static int
12643
reg_bitfield_target_p (rtx x, rtx body)
12644
{
12645
  int i;
12646
 
12647
  if (GET_CODE (body) == SET)
12648
    {
12649
      rtx dest = SET_DEST (body);
12650
      rtx target;
12651
      unsigned int regno, tregno, endregno, endtregno;
12652
 
12653
      if (GET_CODE (dest) == ZERO_EXTRACT)
12654
        target = XEXP (dest, 0);
12655
      else if (GET_CODE (dest) == STRICT_LOW_PART)
12656
        target = SUBREG_REG (XEXP (dest, 0));
12657
      else
12658
        return 0;
12659
 
12660
      if (GET_CODE (target) == SUBREG)
12661
        target = SUBREG_REG (target);
12662
 
12663
      if (!REG_P (target))
12664
        return 0;
12665
 
12666
      tregno = REGNO (target), regno = REGNO (x);
12667
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12668
        return target == x;
12669
 
12670
      endtregno = end_hard_regno (GET_MODE (target), tregno);
12671
      endregno = end_hard_regno (GET_MODE (x), regno);
12672
 
12673
      return endregno > tregno && regno < endtregno;
12674
    }
12675
 
12676
  else if (GET_CODE (body) == PARALLEL)
12677
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
12678
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
12679
        return 1;
12680
 
12681
  return 0;
12682
}
12683
 
12684
/* Return the next insn after INSN that is neither a NOTE nor a
12685
   DEBUG_INSN.  This routine does not look inside SEQUENCEs.  */
12686
 
12687
static rtx
12688
next_nonnote_nondebug_insn (rtx insn)
12689
{
12690
  while (insn)
12691
    {
12692
      insn = NEXT_INSN (insn);
12693
      if (insn == 0)
12694
        break;
12695
      if (NOTE_P (insn))
12696
        continue;
12697
      if (DEBUG_INSN_P (insn))
12698
        continue;
12699
      break;
12700
    }
12701
 
12702
  return insn;
12703
}
12704
 
12705
 
12706
 
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
                  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_VALUE_PROFILE:
          /* Just get rid of this note, as it is unused later anyway.  */
          break;

        case REG_NON_LOCAL_GOTO:
          if (JUMP_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && JUMP_P (i2));
              place = i2;
            }
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (CALL_P (i3))
            place = i3;
          else if (i2 && CALL_P (i2))
            place = i2;
          else
            {
              gcc_assert (flag_non_call_exceptions);
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (CALL_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && CALL_P (i2));
              place = i2;
            }
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if it is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (REG_P (XEXP (note, 0))
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (REG_P (XEXP (note, 0))
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL_TARGET:
        case REG_LABEL_OPERAND:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
             as a JUMP_LABEL or decrement LABEL_NUSES if it's already
             there.  */
          if (place && JUMP_P (place)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place) == NULL
                  || JUMP_LABEL (place) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place);

              if (!label)
                JUMP_LABEL (place) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
            }

          if (place2 && JUMP_P (place2)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place2) == NULL
                  || JUMP_LABEL (place2) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place2);

              if (!label)
                JUMP_LABEL (place2) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;

        case REG_DEAD:
          /* If we replaced the right hand side of FROM_INSN with a
             REG_EQUAL note, the original use of the dying register
             will not have been combined into I3 and I2.  In such cases,
             FROM_INSN is guaranteed to be the first of the combined
             instructions, so we simply need to search back before
             FROM_INSN for the previous use or set of this register,
             then alter the notes there appropriately.

             If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && from_insn == i2mod
              && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
            tem = from_insn;
          else
            {
              if (from_insn
                  && CALL_P (from_insn)
                  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
                place = from_insn;
              else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
                place = i3;
              else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
                       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                place = i2;
              else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
                        && !(i2mod
                             && reg_overlap_mentioned_p (XEXP (note, 0),
                                                         i2mod_old_rhs)))
                       || rtx_equal_p (XEXP (note, 0), elim_i1))
                break;
              tem = i3;
            }

          if (place == 0)
            {
              basic_block bb = this_basic_block;

              for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
                {
                  if (!NONDEBUG_INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX,
                                            NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          SET_INSN_DELETED (tem);
                          if (tem == i2)
                            i2 = NULL_RTX;

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX,
                                                NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              SET_INSN_DELETED (cc0_setter);
                              if (cc0_setter == i2)
                                i2 = NULL_RTX;
                            }
#endif
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  Do not place a REG_DEAD note, even if
                             the register is also used here; that would not
                             match the algorithm used in lifetime analysis
                             and can cause the consistency check in the
                             scheduler to fail.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (CALL_P (tem)
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
                          && from_insn
                          && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }

            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here if it is set at all, not if it is totally replaced,
             which is what `dead_or_set_p' checks, so also check for it being
             set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));
              reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     last_death.  [I no longer understand why this is
                     being done.] */
                  if (rsp->last_death != place)
                    rsp->last_death = 0;
                  place = 0;
                }
              else
                rsp->last_death = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
                {
                  unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += hard_regno_nregs[i][reg_raw_mode[i]])
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note = alloc_reg_note (REG_DEAD, piece,
                                                             NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (!NONDEBUG_INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      break;
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    add_reg_note (tem, REG_UNUSED, piece);
                                    break;
                                  }
                              }

                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          gcc_unreachable ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }

      if (place2)
        add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0));
    }
}
13217
 
13218
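/* A minimal, self-contained sketch of the piece-handling idea above; it is
   not GCC code, and the names piece_info and mark_unused_pieces are
   hypothetical.  It shows the core decision only: each piece of a
   multi-register value gets its own death note exactly when that piece is
   neither used nor already dead or set.  Guarded out so it cannot affect
   the real build.  */
#if 0
#include <stdbool.h>
#include <stdio.h>

struct piece_info
{
  bool used_by_insn;	/* stands in for refers_to_regno_p ()  */
  bool dead_or_set;	/* stands in for dead_or_set_regno_p ()  */
  bool needs_dead_note;
};

static void
mark_unused_pieces (struct piece_info *pieces, unsigned int n)
{
  unsigned int i;

  for (i = 0; i < n; i++)
    /* Mirror the test in the loop above: only a piece that is neither
       referenced nor already dead or set needs a new REG_DEAD note.  */
    pieces[i].needs_dead_note
      = !pieces[i].used_by_insn && !pieces[i].dead_or_set;
}

int
main (void)
{
  /* A double-word value in two hard registers where the insn only
     uses the low word.  */
  struct piece_info pair[2] = { { true, false, false },
                                { false, false, false } };

  mark_unused_pieces (pair, 2);
  printf ("high word needs a REG_DEAD note: %d\n",
          pair[1].needs_dead_note);
  return 0;
}
#endif
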
/* Similarly to the above, distribute the LOG_LINKS that used to be present
   on I3, I2, and I1 to new locations.  This is also called to add a link
   pointing at I3 when I3's destination is changed.  */

static void
distribute_links (rtx links)
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only way that could happen is if
         we were to replace I3, I2, and I1 by I3 and I2.  But in that case
         the destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (DEBUG_INSN_P (insn))
          continue;
        else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (CALL_P (insn)
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
                added_links_insn = place;
            }
        }
    }
}

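/* A minimal, self-contained sketch of the list manipulation performed by
   distribute_links above; it is not GCC code, and the names log_node and
   push_link_unique are hypothetical.  It shows the same pattern as the
   XEXP (link, 1) / LOG_LINKS (place) updates: push a link onto the head of
   an intrusive singly-linked list unless an equivalent entry is already
   present.  Guarded out so it cannot affect the real build.  */
#if 0
#include <stdio.h>

struct log_node
{
  int insn_uid;           /* stands in for XEXP (link, 0)  */
  struct log_node *next;  /* stands in for XEXP (link, 1)  */
};

static void
push_link_unique (struct log_node **head, struct log_node *link)
{
  struct log_node *p;

  /* Same duplicate check as the link2 loop above.  */
  for (p = *head; p; p = p->next)
    if (p->insn_uid == link->insn_uid)
      return;

  /* Same head insertion as the LOG_LINKS (place) update above.  */
  link->next = *head;
  *head = link;
}

int
main (void)
{
  struct log_node a = { 1, 0 }, b = { 2, 0 }, dup = { 1, 0 };
  struct log_node *log_links = 0;
  struct log_node *p;

  push_link_unique (&log_links, &a);
  push_link_unique (&log_links, &b);
  push_link_unique (&log_links, &dup);	/* ignored: insn 1 already linked */

  for (p = log_links; p; p = p->next)
    printf ("link to insn %d\n", p->insn_uid);
  return 0;
}
#endif
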
/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it is not mentioned in the rtx EXPR.
   Otherwise return zero.  */

static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}

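/* A minimal, self-contained sketch of the callback-driven walk used by
   unmentioned_reg_p above; it is not GCC's for_each_rtx, and the names
   expr_node and walk_expr are hypothetical.  The walk visits every
   sub-expression and stops as soon as the callback returns nonzero, which
   is how a single offending register makes the whole query true.  Guarded
   out so it cannot affect the real build.  */
#if 0
#include <stdio.h>

struct expr_node
{
  int is_reg_or_mem;      /* stands in for REG_P (x) || MEM_P (x)  */
  int mentioned_in_expr;  /* stands in for reg_mentioned_p ()  */
  struct expr_node *kids[2];
};

/* Depth-first walk in the style of for_each_rtx: a nonzero return
   from FN terminates the traversal.  */
static int
walk_expr (struct expr_node *x,
           int (*fn) (struct expr_node *, void *), void *data)
{
  if (x == 0)
    return 0;
  if (fn (x, data))
    return 1;
  return (walk_expr (x->kids[0], fn, data)
          || walk_expr (x->kids[1], fn, data));
}

/* Callback in the style of unmentioned_reg_p_1.  */
static int
unmentioned_p_1 (struct expr_node *x, void *data)
{
  (void) data;
  return x->is_reg_or_mem && !x->mentioned_in_expr;
}

int
main (void)
{
  struct expr_node reg = { 1, 0, { 0, 0 } };  /* a reg EXPR never mentions */
  struct expr_node plus = { 0, 0, { &reg, 0 } };

  printf ("EQUIV mentions something EXPR does not: %d\n",
          walk_expr (&plus, unmentioned_p_1, 0));
  return 0;
}
#endif
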
void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}

static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}

/* Try combining insns through substitution.  */
static unsigned int
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine;

  df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
  df_note_add_problem ();
  df_analyze ();

  regstat_init_n_sets_and_refs ();

  rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }

  regstat_free_n_sets_and_refs ();
  return 0;
}

struct rtl_opt_pass pass_combine =
{
 {
  RTL_PASS,
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};
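
/* A minimal, self-contained sketch of the gate/execute convention that
   pass_combine follows; it is not the real GCC pass manager, and the names
   toy_pass and run_pass are hypothetical.  The manager consults the gate
   first and calls execute only when the gate returns nonzero, which is why
   "combine" is skipped entirely when optimize == 0.  Guarded out so it
   cannot affect the real build.  */
#if 0
#include <stdio.h>

struct toy_pass
{
  const char *name;
  int (*gate) (void);              /* cf. gate_handle_combine  */
  unsigned int (*execute) (void);  /* cf. rest_of_handle_combine  */
};

static void
run_pass (const struct toy_pass *pass)
{
  /* Skip the pass when its gate declines.  */
  if (pass->gate && !pass->gate ())
    return;
  pass->execute ();
}

static int
toy_gate (void)
{
  return 1;	/* e.g. optimize > 0  */
}

static unsigned int
toy_execute (void)
{
  printf ("running a combine-like pass\n");
  return 0;
}

int
main (void)
{
  struct toy_pass combine_like = { "combine", toy_gate, toy_execute };

  run_pass (&combine_like);
  return 0;
}
#endif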
