/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
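
/* As an illustrative sketch (hypothetical insns, not taken from any
   real target): given the linked pair

       (set (reg 100) (plus (reg 101) (const_int 4)))
       (set (reg 102) (mult (reg 100) (reg 103)))

   where reg 100 dies in the second insn, substitution produces

       (set (reg 102) (mult (plus (reg 101) (const_int 4))
                            (reg 103)))

   and, if the machine description recognizes the result, the pair is
   replaced by this single insn and the earlier insn is deleted.  */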

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "cgraph.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;

typedef struct reg_stat_struct {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx                           last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx                           last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx                           last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int                           last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int                           last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT        last_set_nonzero_bits;
  char                          last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode)   last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char                          last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char                 sign_bit_copies;

  unsigned HOST_WIDE_INT        nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int                           truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode)   truncated_to_mode : 8;
} reg_stat_type;
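
/* A hypothetical example of the last two groups of fields: on a 32-bit
   machine where byte loads zero extend, an SImode pseudo that is only
   ever loaded from QImode memory would have nonzero_bits == 0xff
   (bits 8..31 are known zero) and sign_bit_copies == 24 (the top 24
   bits all equal the sign bit, which is known to be zero).  */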

DEF_VEC_O(reg_stat_type);
DEF_VEC_ALLOC_O(reg_stat_type,heap);

static VEC(reg_stat_type,heap) *reg_stat;

/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;


/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as an INSN_LIST rtx.  */

static rtx *uid_log_links;

#define INSN_COST(INSN)         (uid_insn_cost[INSN_UID (INSN)])
#define LOG_LINKS(INSN)         (uid_log_links[INSN_UID (INSN)])

/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each extended basic block in scanning order.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;


/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE };

struct undo
{
  struct undo *next;
  enum undo_kind kind;
  union { rtx r; int i; enum machine_mode m; } old_contents;
  union { rtx *r; int *i; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (const_rtx);
static int use_crosses_set_p (const_rtx, int);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static int record_truncated_value (rtx *, void *);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);


/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;


/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN cannot be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx
combine_split_insns (rtx pattern, rtx insn)
{
  rtx ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > VEC_length (reg_stat_type, reg_stat))
    VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
  return ret;
}

/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need only check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == NULL)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return NULL;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == NULL)
                result = this_result;
              else if (this_result)
                return NULL;
            }
        }
    }

  return result;
}


/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  basic_block bb;
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (!REG_P (dest))
    return 0;

  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && CONST_INT_P (newval))
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && CONST_INT_P (SUBREG_REG (oldval))));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && CONST_INT_P (XEXP (oldval, 0))));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)     do_SUBST(&(INTO), (NEWVAL))
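
/* A usage sketch: SUBST (SET_SRC (pat), new_src) records the old
   SET_SRC of PAT in the undo buffer before installing NEW_SRC, so that
   undo_all can restore the original rtl if the combination is
   ultimately rejected.  (PAT and NEW_SRC are hypothetical names, for
   illustration only.)  */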

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)  do_SUBST_INT(&(INTO), (NEWVAL))

/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL)  do_SUBST_MODE(&(INTO), (NEWVAL))

/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
   insn_rtx_cost than the original instruction sequence I1, I2, I3 and
   undobuf.other_insn.  Note that I1 and/or NEWI2PAT may be NULL_RTX.
   NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX.  This
   function returns false if the costs of all instructions can be
   estimated and the replacements are more expensive than the original
   sequence.  */

static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat,
                       rtx newotherpat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Lookup the original insn_rtx_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat, optimize_this_for_speed_p);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat, optimize_this_for_speed_p);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      new_other_cost = insn_rtx_cost (newotherpat, optimize_this_for_speed_p);
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    INSN_COST (i1) = 0;

  return true;
}
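
/* A hypothetical example of the cost test above: if INSN_COST (i2) == 4
   and INSN_COST (i3) == 4 (so old_cost == 8), a replacement consisting
   of a single new pattern with insn_rtx_cost 12 is rejected, because
   both costs are known and 12 > 8; a replacement costing 8 or less is
   accepted and the uid_insn_cost entries are updated.  */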


/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
        {
          next = NEXT_INSN (insn);
          if (INSN_P (insn) && noop_move_p (insn))
            {
              if (dump_file)
                fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));

              delete_insn_and_edges (insn);
            }
        }
    }
}


/* Fill in log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx *next_use, insn;
  df_ref *def_vec, *use_vec;

  next_use = XCNEWVEC (rtx, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear the next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are taken from what the original flow.c did.  Don't ask
     me why it is done this way; I don't know and if it works, I don't
     want to know.  */

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

          /* Log links are created only once.  */
          gcc_assert (!LOG_LINKS (insn));

          for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
            {
              df_ref def = *def_vec;
              int regno = DF_REF_REGNO (def);
              rtx use_insn;

              if (!next_use[regno])
                continue;

              /* Do not consider if it is pre/post modification in MEM.  */
              if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
                continue;

              /* Do not make the log link for frame pointer.  */
              if ((regno == FRAME_POINTER_REGNUM
                   && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
                  || (regno == HARD_FRAME_POINTER_REGNUM
                      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                  || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
                  )
                continue;

              use_insn = next_use[regno];
              if (BLOCK_FOR_INSN (use_insn) == bb)
                {
                  /* flow.c claimed:

                     We don't build a LOG_LINK for hard registers contained
                     in ASM_OPERANDs.  If these registers get replaced,
                     we might wind up changing the semantics of the insn,
                     even if reload can make what appear to be valid
                     assignments later.  */
                  if (regno >= FIRST_PSEUDO_REGISTER
                      || asm_noperands (PATTERN (use_insn)) < 0)
                    {
                      /* Don't add duplicate links between instructions.  */
                      rtx links;
                      for (links = LOG_LINKS (use_insn); links;
                           links = XEXP (links, 1))
                        if (insn == XEXP (links, 0))
                          break;

                      if (!links)
                        LOG_LINKS (use_insn) =
                          alloc_INSN_LIST (insn, LOG_LINKS (use_insn));
                    }
                }
              next_use[regno] = NULL_RTX;
            }

          for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
            {
              df_ref use = *use_vec;
              int regno = DF_REF_REGNO (use);

              /* Do not consider the usage of the stack pointer
                 by function call.  */
              if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
                continue;

              next_use[regno] = insn;
            }
        }
    }

  free (next_use);
}

/* Clear LOG_LINKS fields of insns.  */

static void
clear_log_links (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      free_INSN_LIST_list (&LOG_LINKS (insn));
}

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  rtx links, nextlinks;
  rtx first;
  basic_block last_bb;

  int new_direct_jump_p = 0;

  for (first = f; first && !INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (rtx, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;
  label_tick = label_tick_ebb_start = 1;

  /* Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  setup_incoming_promotions (first);
  /* Allow the entry block and the first block to fall into the same EBB.
     Conceptually the incoming promotions are assigned to the entry block.  */
  last_bb = ENTRY_BLOCK_PTR;

  create_log_links ();
  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      FOR_BB_INSNS (this_basic_block, insn)
        if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
          {
            subst_low_luid = DF_INSN_LUID (insn);
            subst_insn = insn;

            note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                         insn);
            record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
            for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
              if (REG_NOTE_KIND (links) == REG_INC)
                set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                  insn);
#endif

            /* Record the current insn_rtx_cost of this instruction.  */
            if (NONJUMP_INSN_P (insn))
              INSN_COST (insn) = insn_rtx_cost (PATTERN (insn),
                                                optimize_this_for_speed_p);
            if (dump_file)
              fprintf (dump_file, "insn_cost %d: %d\n",
                       INSN_UID (insn), INSN_COST (insn));
          }
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */
  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);
  last_bb = ENTRY_BLOCK_PTR;

  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      rtl_profile_for_bb (this_basic_block);
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;
          if (NONDEBUG_INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* See if we can find hardregs and subreg of pseudos in
                 narrower modes.  This could help turning TRUNCATEs
                 into SUBREGs.  */
              note_uses (&PATTERN (insn), record_truncated_values, NULL);

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may already have been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      i2mod = temp;
                      i2mod_old_rhs = copy_rtx (orig);
                      i2mod_new_rhs = copy_rtx (note);
                      next = try_combine (insn, i2mod, NULL_RTX,
                                          &new_direct_jump_p);
                      i2mod = NULL_RTX;
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }

  default_rtl_profile ();
  clear_log_links ();
  clear_bb_flags ();
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  /* Clean up.  */
  free (uid_log_links);
  free (uid_insn_cost);
  VEC_free (reg_stat_type, heap, reg_stat);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  reg_stat_type *p;

  for (i = 0; VEC_iterate (reg_stat_type, reg_stat, i, p); ++i)
    memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (rtx first)
{
  tree arg;
  bool strictly_local = false;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    {
      rtx x, reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      enum machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
        continue;

      /* Determine, if possible, whether all call sites of the current
         function lie within the current compilation unit.  (This does
         take into account the exporting of a function via taking its
         address, and so forth.)  */
      strictly_local = cgraph_local_info (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
         (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
         TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
         after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
      mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
                                     TREE_TYPE (cfun->decl), 0);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when:
         (a) A mode promotion has occurred;  */
      if (mode1 == mode3)
        continue;
      /* (b) The mode of the register is the same as the mode of
             the argument as it is passed; */
      if (mode3 != mode4)
        continue;
      /* (c) There's no language level extension;  */
      if (mode1 == mode2)
        ;
      /* (c.1) All callers are from the current compilation unit.  If that's
         the case we don't have to rely on an ABI, we only have to know
         what we're generating right now, and we know that we will do the
         mode1 to mode2 promotion with the given sign.  */
      else if (!strictly_local)
        continue;
      /* (c.2) The combination of the two promotions is useful.  This is
         true when the signs match, or if the first promotion is unsigned.
         In the latter case, (sign_extend (zero_extend x)) is the same as
         (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
      else if (uns1)
        uns3 = true;
      else if (uns3)
        continue;

      /* Record that the value was promoted from mode1 to mode3,
         so that any sign extension at the head of the current
         function may be eliminated.  */
      x = gen_rtx_CLOBBER (mode1, const0_rtx);
      x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
      record_value_for_reg (reg, first, x);
    }
}
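
/* A hypothetical example of the recorded value: for a signed char
   argument promoted to SImode by a sign-extending ABI, the value
   recorded for the incoming register is

       (sign_extend:SI (clobber:QI (const_int 0)))

   where the inner CLOBBER stands for the unknown QImode payload.  */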
1412
 
1413
/* Called via note_stores.  If X is a pseudo that is narrower than
1414
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
1415
 
1416
   If we are setting only a portion of X and we can't figure out what
1417
   portion, assume all bits will be used since we don't know what will
1418
   be happening.
1419
 
1420
   Similarly, set how many bits of X are known to be copies of the sign bit
1421
   at all locations in the function.  This is the smallest number implied
1422
   by any set of X.  */
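/* As an informal example (register numbers illustrative): after seeing
       (set (reg:SI 70) (and:SI (reg:SI 71) (const_int 255)))
   the nonzero_bits recorded for pseudo 70 become 0xff, and after
       (set (reg:SI 72) (sign_extend:SI (reg:QI 73)))
   the sign_bit_copies recorded for pseudo 72 become 25 (32 - 8 + 1),
   assuming 32-bit SImode and 8-bit QImode.  */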
1423
 
1424
static void
1425
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
1426
{
1427
  rtx insn = (rtx) data;
1428
  unsigned int num;
1429
 
1430
  if (REG_P (x)
1431
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
1432
      /* If this register is undefined at the start of the function, we can't
1433
         say what its contents were.  */
1434
      && ! REGNO_REG_SET_P
1435
           (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
1436
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
1437
    {
1438
      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
1439
 
1440
      if (set == 0 || GET_CODE (set) == CLOBBER)
1441
        {
1442
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1443
          rsp->sign_bit_copies = 1;
1444
          return;
1445
        }
1446
 
1447
      /* If this register is being initialized using itself, and the
1448
         register is uninitialized in this basic block, and there are
1449
         no LOG_LINKS which set the register, then part of the
1450
         register is uninitialized.  In that case we can't assume
1451
         anything about the number of nonzero bits.
1452
 
1453
         ??? We could do better if we checked this in
1454
         reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
1455
         could avoid making assumptions about the insn which initially
1456
         sets the register, while still using the information in other
1457
         insns.  We would have to be careful to check every insn
1458
         involved in the combination.  */
1459
 
1460
      if (insn
1461
          && reg_referenced_p (x, PATTERN (insn))
1462
          && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
1463
                               REGNO (x)))
1464
        {
1465
          rtx link;
1466
 
1467
          for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1468
            {
1469
              if (dead_or_set_p (XEXP (link, 0), x))
1470
                break;
1471
            }
1472
          if (!link)
1473
            {
1474
              rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1475
              rsp->sign_bit_copies = 1;
1476
              return;
1477
            }
1478
        }
1479
 
1480
      /* If this is a complex assignment, see if we can convert it into a
1481
         simple assignment.  */
1482
      set = expand_field_assignment (set);
1483
 
1484
      /* If this is a simple assignment, or we have a paradoxical SUBREG,
1485
         set what we know about X.  */
1486
 
1487
      if (SET_DEST (set) == x
1488
          || (GET_CODE (SET_DEST (set)) == SUBREG
1489
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
1490
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
1491
              && SUBREG_REG (SET_DEST (set)) == x))
1492
        {
1493
          rtx src = SET_SRC (set);
1494
 
1495
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
1496
          /* If X is narrower than a word and SRC is a non-negative
1497
             constant that would appear negative in the mode of X,
1498
             sign-extend it for use in reg_stat[].nonzero_bits because some
1499
             machines (maybe most) will actually do the sign-extension
1500
             and this is the conservative approach.
1501
 
1502
             ??? For 2.5, try to tighten up the MD files in this regard
1503
             instead of this kludge.  */
1504
 
1505
          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
1506
              && CONST_INT_P (src)
1507
              && INTVAL (src) > 0
1508
              && 0 != (INTVAL (src)
1509
                       & ((HOST_WIDE_INT) 1
1510
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
1511
            src = GEN_INT (INTVAL (src)
1512
                           | ((HOST_WIDE_INT) (-1)
1513
                              << GET_MODE_BITSIZE (GET_MODE (x))));
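          /* E.g. (illustrative) a QImode store of (const_int 128) is
             recorded as if it stored (const_int -128): the low byte is
             unchanged, but the upper bits become all ones, matching what
             a sign-extending machine leaves in the wider register.  */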
1514
#endif
1515
 
1516
          /* Don't call nonzero_bits if it cannot change anything.  */
1517
          if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
1518
            rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
1519
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
1520
          if (rsp->sign_bit_copies == 0
1521
              || rsp->sign_bit_copies > num)
1522
            rsp->sign_bit_copies = num;
1523
        }
1524
      else
1525
        {
1526
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
1527
          rsp->sign_bit_copies = 1;
1528
        }
1529
    }
1530
}
1531
 
1532
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
1533
   insns that were previously combined into I3 or that will be combined
1534
   into the merger of INSN and I3.
1535
 
1536
   Return 0 if the combination is not allowed for any reason.
1537
 
1538
   If the combination is allowed, *PDEST will be set to the single
1539
   destination of INSN and *PSRC to the single source, and this function
1540
   will return 1.  */
1541
 
1542
static int
1543
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
1544
               rtx *pdest, rtx *psrc)
1545
{
1546
  int i;
1547
  const_rtx set = 0;
1548
  rtx src, dest;
1549
  rtx p;
1550
#ifdef AUTO_INC_DEC
1551
  rtx link;
1552
#endif
1553
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
1554
                              && next_active_insn (succ) == i3)
1555
                      : next_active_insn (insn) == i3);
1556
 
1557
  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
1558
     or a PARALLEL consisting of such a SET and CLOBBERs.
1559
 
1560
     If INSN has CLOBBER parallel parts, ignore them for our processing.
1561
     By definition, these happen during the execution of the insn.  When it
1562
     is merged with another insn, all bets are off.  If they are, in fact,
1563
     needed and aren't also supplied in I3, they may be added by
1564
     recog_for_combine.  Otherwise, it won't match.
1565
 
1566
     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
1567
     note.
1568
 
1569
     Get the source and destination of INSN.  If more than one, can't
1570
     combine.  */
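  /* For instance (hypothetical), INSN may be
         (parallel [(set (reg:SI 65) (plus:SI (reg:SI 60) (reg:SI 61)))
                    (clobber (scratch:SI))])
     in which case the CLOBBER is ignored and the lone SET is the
     candidate for substitution into I3.  */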
1571
 
1572
  if (GET_CODE (PATTERN (insn)) == SET)
1573
    set = PATTERN (insn);
1574
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
1575
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1576
    {
1577
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1578
        {
1579
          rtx elt = XVECEXP (PATTERN (insn), 0, i);
1580
 
1581
          switch (GET_CODE (elt))
1582
            {
1583
            /* This is important to combine floating point insns
1584
               for the SH4 port.  */
1585
            case USE:
1586
              /* Combining an isolated USE doesn't make sense.
1587
                 We depend here on combinable_i3pat to reject them.  */
1588
              /* The code below this loop only verifies that the inputs of
1589
                 the SET in INSN do not change.  We call reg_set_between_p
1590
                 to verify that the REG in the USE does not change between
1591
                 I3 and INSN.
1592
                 If the USE in INSN was for a pseudo register, the matching
1593
                 insn pattern will likely match any register; combining this
1594
                 with any other USE would only be safe if we knew that the
1595
                 used registers have identical values, or if there was
1596
                 something to tell them apart, e.g. different modes.  For
1597
                 now, we forgo such complicated tests and simply disallow
1598
                 combining of USES of pseudo registers with any other USE.  */
1599
              if (REG_P (XEXP (elt, 0))
1600
                  && GET_CODE (PATTERN (i3)) == PARALLEL)
1601
                {
1602
                  rtx i3pat = PATTERN (i3);
1603
                  int i = XVECLEN (i3pat, 0) - 1;
1604
                  unsigned int regno = REGNO (XEXP (elt, 0));
1605
 
1606
                  do
1607
                    {
1608
                      rtx i3elt = XVECEXP (i3pat, 0, i);
1609
 
1610
                      if (GET_CODE (i3elt) == USE
1611
                          && REG_P (XEXP (i3elt, 0))
1612
                          && (REGNO (XEXP (i3elt, 0)) == regno
1613
                              ? reg_set_between_p (XEXP (elt, 0),
1614
                                                   PREV_INSN (insn), i3)
1615
                              : regno >= FIRST_PSEUDO_REGISTER))
1616
                        return 0;
1617
                    }
1618
                  while (--i >= 0);
1619
                }
1620
              break;
1621
 
1622
              /* We can ignore CLOBBERs.  */
1623
            case CLOBBER:
1624
              break;
1625
 
1626
            case SET:
1627
              /* Ignore SETs whose result isn't used, but not those that
1628
                 have side-effects.  */
1629
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
1630
                  && insn_nothrow_p (insn)
1631
                  && !side_effects_p (elt))
1632
                break;
1633
 
1634
              /* If we have already found a SET, this is a second one and
1635
                 so we cannot combine with this insn.  */
1636
              if (set)
1637
                return 0;
1638
 
1639
              set = elt;
1640
              break;
1641
 
1642
            default:
1643
              /* Anything else means we can't combine.  */
1644
              return 0;
1645
            }
1646
        }
1647
 
1648
      if (set == 0
1649
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
1650
             so don't do anything with it.  */
1651
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
1652
        return 0;
1653
    }
1654
  else
1655
    return 0;
1656
 
1657
  if (set == 0)
1658
    return 0;
1659
 
1660
  set = expand_field_assignment (set);
1661
  src = SET_SRC (set), dest = SET_DEST (set);
1662
 
1663
  /* Don't eliminate a store to the stack pointer.  */
1664
  if (dest == stack_pointer_rtx
1665
      /* Don't combine with an insn that sets a register to itself if it has
1666
         a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
1667
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1668
      /* Can't merge an ASM_OPERANDS.  */
1669
      || GET_CODE (src) == ASM_OPERANDS
1670
      /* Can't merge a function call.  */
1671
      || GET_CODE (src) == CALL
1672
      /* Don't eliminate a function call argument.  */
1673
      || (CALL_P (i3)
1674
          && (find_reg_fusage (i3, USE, dest)
1675
              || (REG_P (dest)
1676
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
1677
                  && global_regs[REGNO (dest)])))
1678
      /* Don't substitute into an incremented register.  */
1679
      || FIND_REG_INC_NOTE (i3, dest)
1680
      || (succ && FIND_REG_INC_NOTE (succ, dest))
1681
      /* Don't substitute into a non-local goto, this confuses CFG.  */
1682
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
1683
      /* Make sure that DEST is not used after SUCC but before I3.  */
1684
      || (succ && ! all_adjacent
1685
          && reg_used_between_p (dest, succ, i3))
1686
      /* Make sure that the value that is to be substituted for the register
1687
         does not use any registers whose values alter in between.  However,
1688
         if the insns are adjacent, a use can't cross a set even though we
1689
         think it might (this can happen for a sequence of insns each setting
1690
         the same destination; last_set of that register might point to
1691
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
1692
         equivalent to the memory so the substitution is valid even if there
1693
         are intervening stores.  Also, don't move a volatile asm or
1694
         UNSPEC_VOLATILE across any other insns.  */
1695
      || (! all_adjacent
1696
          && (((!MEM_P (src)
1697
                || ! find_reg_note (insn, REG_EQUIV, src))
1698
               && use_crosses_set_p (src, DF_INSN_LUID (insn)))
1699
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
1700
              || GET_CODE (src) == UNSPEC_VOLATILE))
1701
      /* Don't combine across a CALL_INSN, because that would possibly
1702
         change whether the life span of some REGs crosses calls or not,
1703
         and it is a pain to update that information.
1704
         Exception: if source is a constant, moving it later can't hurt.
1705
         Accept that as a special case.  */
1706
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
1707
    return 0;
1708
 
1709
  /* DEST must either be a REG or CC0.  */
1710
  if (REG_P (dest))
1711
    {
1712
      /* If register alignment is being enforced for multi-word items in all
1713
         cases except for parameters, it is possible to have a register copy
1714
         insn referencing a hard register that is not allowed to contain the
1715
         mode being copied and which would not be valid as an operand of most
1716
         insns.  Eliminate this problem by not combining with such an insn.
1717
 
1718
         Also, on some machines we don't want to extend the life of a hard
1719
         register.  */
1720
 
1721
      if (REG_P (src)
1722
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1723
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
1724
              /* Don't extend the life of a hard register unless it is
1725
                 a user variable (if we have few registers) or it can't
1726
                 fit into the desired register (meaning something special
1727
                 is going on).
1728
                 Also avoid substituting a return register into I3, because
1729
                 reload can't handle a conflict with constraints of other
1730
                 inputs.  */
1731
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
1732
                  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
1733
        return 0;
1734
    }
1735
  else if (GET_CODE (dest) != CC0)
1736
    return 0;
1737
 
1738
 
1739
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
1740
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1741
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
1742
        {
1743
          /* Don't substitute for a register intended as a clobberable
1744
             operand.  */
1745
          rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
1746
          if (rtx_equal_p (reg, dest))
1747
            return 0;
1748
 
1749
          /* If the clobber represents an earlyclobber operand, we must not
1750
             substitute an expression containing the clobbered register.
1751
             As we do not analyze the constraint strings here, we have to
1752
             make the conservative assumption.  However, if the register is
1753
             a fixed hard reg, the clobber cannot represent any operand;
1754
             we leave it up to the machine description to either accept or
1755
             reject use-and-clobber patterns.  */
1756
          if (!REG_P (reg)
1757
              || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1758
              || !fixed_regs[REGNO (reg)])
1759
            if (reg_overlap_mentioned_p (reg, src))
1760
              return 0;
1761
        }
1762
 
1763
  /* If INSN contains anything volatile, or is an `asm' (whether volatile
1764
     or not), reject, unless nothing volatile comes between it and I3.  */
1765
 
1766
  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1767
    {
1768
      /* Make sure succ doesn't contain a volatile reference.  */
1769
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1770
        return 0;
1771
 
1772
      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1773
        if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
1774
          return 0;
1775
    }
1776
 
1777
  /* If INSN is an asm, and DEST is a hard register, reject, since it has
1778
     to be an explicit register variable, and was chosen for a reason.  */
1779
 
1780
  if (GET_CODE (src) == ASM_OPERANDS
1781
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1782
    return 0;
1783
 
1784
  /* If there are any volatile insns between INSN and I3, reject, because
1785
     they might affect machine state.  */
1786
 
1787
  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1788
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
1789
      return 0;
1790
 
1791
  /* If INSN contains an autoincrement or autodecrement, make sure that
1792
     register is not used between there and I3, and not already used in
1793
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
1794
     Also insist that I3 not be a jump; if it were one
1795
     and the incremented register were spilled, we would lose.  */
1796
 
1797
#ifdef AUTO_INC_DEC
1798
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1799
    if (REG_NOTE_KIND (link) == REG_INC
1800
        && (JUMP_P (i3)
1801
            || reg_used_between_p (XEXP (link, 0), insn, i3)
1802
            || (pred != NULL_RTX
1803
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
1804
            || (succ != NULL_RTX
1805
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
1806
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1807
      return 0;
1808
#endif
1809
 
1810
#ifdef HAVE_cc0
1811
  /* Don't combine an insn that follows a CC0-setting insn.
1812
     An insn that uses CC0 must not be separated from the one that sets it.
1813
     We do, however, allow I2 to follow a CC0-setting insn if that insn
1814
     is passed as I1; in that case it will be deleted also.
1815
     We also allow combining in this case if all the insns are adjacent
1816
     because that would leave the two CC0 insns adjacent as well.
1817
     It would be more logical to test whether CC0 occurs inside I1 or I2,
1818
     but that would be much slower, and this ought to be equivalent.  */
1819
 
1820
  p = prev_nonnote_insn (insn);
1821
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
1822
      && ! all_adjacent)
1823
    return 0;
1824
#endif
1825
 
1826
  /* If we get here, we have passed all the tests and the combination is
1827
     to be allowed.  */
1828
 
1829
  *pdest = dest;
1830
  *psrc = src;
1831
 
1832
  return 1;
1833
}
1834
 
1835
/* LOC is the location within I3 that contains its pattern or the component
1836
   of a PARALLEL of the pattern.  We validate that it is valid for combining.
1837
 
1838
   One problem is that if I3 modifies its output, as opposed to replacing it
1839
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
1840
   so would produce an insn that is not equivalent to the original insns.
1841
 
1842
   Consider:
1843
 
1844
         (set (reg:DI 101) (reg:DI 100))
1845
         (set (subreg:SI (reg:DI 101) 0) <foo>)
1846
 
1847
   This is NOT equivalent to:
1848
 
1849
         (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1850
                    (set (reg:DI 101) (reg:DI 100))])
1851
 
1852
   Not only does this modify 100 (in which case it might still be valid
1853
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1854
 
1855
   We can also run into a problem if I2 sets a register that I1
1856
   uses and I1 gets directly substituted into I3 (not via I2).  In that
1857
   case, we would be getting the wrong value of I2DEST into I3, so we
1858
   must reject the combination.  This case occurs when I2 and I1 both
1859
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1860
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
1861
   of a SET must prevent combination from occurring.
1862
 
1863
   Before doing the above check, we first try to expand a field assignment
1864
   into a set of logical operations.
1865
 
1866
   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
1867
   we place a register that is both set and used within I3.  If more than one
1868
   such register is detected, we fail.
1869
 
1870
   Return 1 if the combination is valid, zero otherwise.  */
1871
 
1872
static int
1873
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
1874
                  int i1_not_in_src, rtx *pi3dest_killed)
1875
{
1876
  rtx x = *loc;
1877
 
1878
  if (GET_CODE (x) == SET)
1879
    {
1880
      rtx set = x;
1881
      rtx dest = SET_DEST (set);
1882
      rtx src = SET_SRC (set);
1883
      rtx inner_dest = dest;
1884
      rtx subdest;
1885
 
1886
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
1887
             || GET_CODE (inner_dest) == SUBREG
1888
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
1889
        inner_dest = XEXP (inner_dest, 0);
1890
 
1891
      /* Check for the case where I3 modifies its output, as discussed
1892
         above.  We don't want to prevent pseudos from being combined
1893
         into the address of a MEM, so only prevent the combination if
1894
         i1 or i2 set the same MEM.  */
1895
      if ((inner_dest != dest
1896
           && (!MEM_P (inner_dest)
1897
            || rtx_equal_p (i2dest, inner_dest)
1898
            || (i1dest && rtx_equal_p (i1dest, inner_dest)))
1899
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
1900
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
1901
 
1902
          /* This is the same test done in can_combine_p except we can't test
1903
             all_adjacent; we don't have to, since this instruction will stay
1904
             in place, thus we are not considering increasing the lifetime of
1905
             INNER_DEST.
1906
 
1907
             Also, if this insn sets a function argument, combining it with
1908
             something that might need a spill could clobber a previous
1909
             function argument; the all_adjacent test in can_combine_p also
1910
             checks this; here, we do a more specific test for this case.  */
1911
 
1912
          || (REG_P (inner_dest)
1913
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1914
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1915
                                        GET_MODE (inner_dest))))
1916
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1917
        return 0;
1918
 
1919
      /* If DEST is used in I3, it is being killed in this insn, so
1920
         record that for later.  We have to consider paradoxical
1921
         subregs here, since they kill the whole register, but we
1922
         ignore partial subregs, STRICT_LOW_PART, etc.
1923
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1924
         STACK_POINTER_REGNUM, since these are always considered to be
1925
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
1926
      subdest = dest;
1927
      if (GET_CODE (subdest) == SUBREG
1928
          && (GET_MODE_SIZE (GET_MODE (subdest))
1929
              >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
1930
        subdest = SUBREG_REG (subdest);
1931
      if (pi3dest_killed
1932
          && REG_P (subdest)
1933
          && reg_referenced_p (subdest, PATTERN (i3))
1934
          && REGNO (subdest) != FRAME_POINTER_REGNUM
1935
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1936
          && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
1937
#endif
1938
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1939
          && (REGNO (subdest) != ARG_POINTER_REGNUM
1940
              || ! fixed_regs [REGNO (subdest)])
1941
#endif
1942
          && REGNO (subdest) != STACK_POINTER_REGNUM)
1943
        {
1944
          if (*pi3dest_killed)
1945
            return 0;
1946
 
1947
          *pi3dest_killed = subdest;
1948
        }
1949
    }
1950
 
1951
  else if (GET_CODE (x) == PARALLEL)
1952
    {
1953
      int i;
1954
 
1955
      for (i = 0; i < XVECLEN (x, 0); i++)
1956
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1957
                                i1_not_in_src, pi3dest_killed))
1958
          return 0;
1959
    }
1960
 
1961
  return 1;
1962
}
1963
 
1964
/* Return 1 if X is an arithmetic expression that contains a multiplication
1965
   or a division.  We don't count multiplications by powers of two here.  */
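/* For instance, (plus:SI (mult:SI (reg:SI 65) (const_int 3)) (reg:SI 66))
   contains such a multiplication, while (mult:SI (reg:SI 65) (const_int 8))
   does not, because 8 is an exact power of two.  */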
1966
 
1967
static int
1968
contains_muldiv (rtx x)
1969
{
1970
  switch (GET_CODE (x))
1971
    {
1972
    case MOD:  case DIV:  case UMOD:  case UDIV:
1973
      return 1;
1974
 
1975
    case MULT:
1976
      return ! (CONST_INT_P (XEXP (x, 1))
1977
                && exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
1978
    default:
1979
      if (BINARY_P (x))
1980
        return contains_muldiv (XEXP (x, 0))
1981
            || contains_muldiv (XEXP (x, 1));
1982
 
1983
      if (UNARY_P (x))
1984
        return contains_muldiv (XEXP (x, 0));
1985
 
1986
      return 0;
1987
    }
1988
}
1989
 
1990
/* Determine whether INSN can be used in a combination.  Return nonzero if
1991
   not.  This is used in try_combine to detect early some cases where we
1992
   can't perform combinations.  */
1993
 
1994
static int
1995
cant_combine_insn_p (rtx insn)
1996
{
1997
  rtx set;
1998
  rtx src, dest;
1999
 
2000
  /* If this isn't really an insn, we can't do anything.
2001
     This can occur when flow deletes an insn that it has merged into an
2002
     auto-increment address.  */
2003
  if (! INSN_P (insn))
2004
    return 1;
2005
 
2006
  /* Never combine loads and stores involving hard regs that are likely
2007
     to be spilled.  The register allocator can usually handle such
2008
     reg-reg moves by tying.  If we allow the combiner to make
2009
     substitutions of likely-spilled regs, reload might die.
2010
     As an exception, we allow combinations involving fixed regs; these are
2011
     not available to the register allocator so there's no risk involved.  */
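  /* E.g. (hypothetical target and register numbers) a copy from a
     likely-spilled argument or return hard register into a pseudo,
         (set (reg:SI 70) (reg:SI 0)),
     is rejected here so that the allocator can tie the two registers.  */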
2012
 
2013
  set = single_set (insn);
2014
  if (! set)
2015
    return 0;
2016
  src = SET_SRC (set);
2017
  dest = SET_DEST (set);
2018
  if (GET_CODE (src) == SUBREG)
2019
    src = SUBREG_REG (src);
2020
  if (GET_CODE (dest) == SUBREG)
2021
    dest = SUBREG_REG (dest);
2022
  if (REG_P (src) && REG_P (dest)
2023
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
2024
           && ! fixed_regs[REGNO (src)]
2025
           && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
2026
          || (REGNO (dest) < FIRST_PSEUDO_REGISTER
2027
              && ! fixed_regs[REGNO (dest)]
2028
              && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
2029
    return 1;
2030
 
2031
  return 0;
2032
}
2033
 
2034
struct likely_spilled_retval_info
2035
{
2036
  unsigned regno, nregs;
2037
  unsigned mask;
2038
};
2039
 
2040
/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
2041
   hard registers that are known to be written to / clobbered in full.  */
2042
static void
2043
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
2044
{
2045
  struct likely_spilled_retval_info *const info =
2046
    (struct likely_spilled_retval_info *) data;
2047
  unsigned regno, nregs;
2048
  unsigned new_mask;
2049
 
2050
  if (!REG_P (XEXP (set, 0)))
2051
    return;
2052
  regno = REGNO (x);
2053
  if (regno >= info->regno + info->nregs)
2054
    return;
2055
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
2056
  if (regno + nregs <= info->regno)
2057
    return;
2058
  new_mask = (2U << (nregs - 1)) - 1;
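  /* E.g. for nregs == 2, NEW_MASK is 0b11; the shifts below align it so
     that bit 0 of INFO->mask corresponds to hard register INFO->regno.  */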
2059
  if (regno < info->regno)
2060
    new_mask >>= info->regno - regno;
2061
  else
2062
    new_mask <<= regno - info->regno;
2063
  info->mask &= ~new_mask;
2064
}
2065
 
2066
/* Return nonzero iff part of the return value is live during INSN, and
2067
   it is likely spilled.  This can happen when more than one insn is needed
2068
   to copy the return value, e.g. when we consider combining into the
2069
   second copy insn for a complex value.  */
2070
 
2071
static int
2072
likely_spilled_retval_p (rtx insn)
2073
{
2074
  rtx use = BB_END (this_basic_block);
2075
  rtx reg, p;
2076
  unsigned regno, nregs;
2077
  /* We assume here that no machine mode needs more than
2078
     32 hard registers when the value overlaps with a register
2079
     for which FUNCTION_VALUE_REGNO_P is true.  */
2080
  unsigned mask;
2081
  struct likely_spilled_retval_info info;
2082
 
2083
  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
2084
    return 0;
2085
  reg = XEXP (PATTERN (use), 0);
2086
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
2087
    return 0;
2088
  regno = REGNO (reg);
2089
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
2090
  if (nregs == 1)
2091
    return 0;
2092
  mask = (2U << (nregs - 1)) - 1;
2093
 
2094
  /* Disregard parts of the return value that are set later.  */
2095
  info.regno = regno;
2096
  info.nregs = nregs;
2097
  info.mask = mask;
2098
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
2099
    if (INSN_P (p))
2100
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
2101
  mask = info.mask;
2102
 
2103
  /* Check if any of the (probably) live return value registers is
2104
     likely spilled.  */
2105
  nregs--;
2106
  do
2107
    {
2108
      if ((mask & 1 << nregs)
2109
          && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
2110
        return 1;
2111
    } while (nregs--);
2112
  return 0;
2113
}
2114
 
2115
/* Adjust INSN after we made a change to its destination.
2116
 
2117
   Changing the destination can invalidate notes that say something about
2118
   the results of the insn and a LOG_LINK pointing to the insn.  */
2119
 
2120
static void
2121
adjust_for_new_dest (rtx insn)
2122
{
2123
  /* For notes, be conservative and simply remove them.  */
2124
  remove_reg_equal_equiv_notes (insn);
2125
 
2126
  /* The new insn will have a destination that was previously the destination
2127
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
2128
     the next use of that destination.  */
2129
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));
2130
 
2131
  df_insn_rescan (insn);
2132
}
2133
 
2134
/* Return TRUE if combine can reuse reg X in mode MODE.
2135
   ADDED_SETS is nonzero if the original set is still required.  */
2136
static bool
2137
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
2138
{
2139
  unsigned int regno;
2140
 
2141
  if (!REG_P (x))
2142
    return false;
2143
 
2144
  regno = REGNO (x);
2145
  /* Allow hard registers if the new mode is legal, and occupies no more
2146
     registers than the old mode.  */
2147
  if (regno < FIRST_PSEUDO_REGISTER)
2148
    return (HARD_REGNO_MODE_OK (regno, mode)
2149
            && (hard_regno_nregs[regno][GET_MODE (x)]
2150
                >= hard_regno_nregs[regno][mode]));
2151
 
2152
  /* Or a pseudo that is only used once.  */
2153
  return (REG_N_SETS (regno) == 1 && !added_sets
2154
          && !REG_USERVAR_P (x));
2155
}
2156
 
2157
 
2158
/* Check whether X, the destination of a set, refers to part of
2159
   the register specified by REG.  */
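/* E.g. (strict_low_part (subreg:HI (reg:SI 65) 0)) refers to a subword
   of pseudo 65, whereas the full (reg:SI 65) itself does not.  */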
2160
 
2161
static bool
2162
reg_subword_p (rtx x, rtx reg)
2163
{
2164
  /* Check that reg is an integer mode register.  */
2165
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
2166
    return false;
2167
 
2168
  if (GET_CODE (x) == STRICT_LOW_PART
2169
      || GET_CODE (x) == ZERO_EXTRACT)
2170
    x = XEXP (x, 0);
2171
 
2172
  return GET_CODE (x) == SUBREG
2173
         && SUBREG_REG (x) == reg
2174
         && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
2175
}
2176
 
2177
#ifdef AUTO_INC_DEC
2178
/* Replace auto-increment addressing modes with explicit operations to
2179
   access the same addresses without modifying the corresponding
2180
   registers.  If AFTER holds, SRC is meant to be reused after the
2181
   side effect, otherwise it is to be reused before that.  */
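/* For example (assuming 4-byte SImode), an address (post_inc:SI (reg:SI 60))
   inside a MEM is rewritten as plain (reg:SI 60) when the value is wanted
   before the side effect, and as (plus:SI (reg:SI 60) (const_int 4)) when
   it is wanted after it.  */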
2182
 
2183
static rtx
2184
cleanup_auto_inc_dec (rtx src, bool after, enum machine_mode mem_mode)
2185
{
2186
  rtx x = src;
2187
  const RTX_CODE code = GET_CODE (x);
2188
  int i;
2189
  const char *fmt;
2190
 
2191
  switch (code)
2192
    {
2193
    case REG:
2194
    case CONST_INT:
2195
    case CONST_DOUBLE:
2196
    case CONST_FIXED:
2197
    case CONST_VECTOR:
2198
    case SYMBOL_REF:
2199
    case CODE_LABEL:
2200
    case PC:
2201
    case CC0:
2202
    case SCRATCH:
2203
      /* SCRATCHes must be shared because each represents a distinct value.  */
2204
      return x;
2205
    case CLOBBER:
2206
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2207
        return x;
2208
      break;
2209
 
2210
    case CONST:
2211
      if (shared_const_p (x))
2212
        return x;
2213
      break;
2214
 
2215
    case MEM:
2216
      mem_mode = GET_MODE (x);
2217
      break;
2218
 
2219
    case PRE_INC:
2220
    case PRE_DEC:
2221
    case POST_INC:
2222
    case POST_DEC:
2223
      gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode);
2224
      if (after == (code == PRE_INC || code == PRE_DEC))
2225
        x = cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode);
2226
      else
2227
        x = gen_rtx_PLUS (GET_MODE (x),
2228
                          cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode),
2229
                          GEN_INT ((code == PRE_INC || code == POST_INC)
2230
                                   ? GET_MODE_SIZE (mem_mode)
2231
                                   : -GET_MODE_SIZE (mem_mode)));
2232
      return x;
2233
 
2234
    case PRE_MODIFY:
2235
    case POST_MODIFY:
2236
      if (after == (code == PRE_MODIFY))
2237
        x = XEXP (x, 0);
2238
      else
2239
        x = XEXP (x, 1);
2240
      return cleanup_auto_inc_dec (x, after, mem_mode);
2241
 
2242
    default:
2243
      break;
2244
    }
2245
 
2246
  /* Copy the various flags, fields, and other information.  We assume
2247
     that all fields need copying, and then clear the fields that should
2248
     not be copied.  That is the sensible default behavior, and forces
2249
     us to explicitly document why we are *not* copying a flag.  */
2250
  x = shallow_copy_rtx (x);
2251
 
2252
  /* We do not copy the USED flag, which is used as a mark bit during
2253
     walks over the RTL.  */
2254
  RTX_FLAG (x, used) = 0;
2255
 
2256
  /* We do not copy FRAME_RELATED for INSNs.  */
2257
  if (INSN_P (x))
2258
    RTX_FLAG (x, frame_related) = 0;
2259
 
2260
  fmt = GET_RTX_FORMAT (code);
2261
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2262
    if (fmt[i] == 'e')
2263
      XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), after, mem_mode);
2264
    else if (fmt[i] == 'E' || fmt[i] == 'V')
2265
      {
2266
        int j;
2267
        XVEC (x, i) = rtvec_alloc (XVECLEN (x, i));
2268
        for (j = 0; j < XVECLEN (x, i); j++)
2269
          XVECEXP (x, i, j)
2270
            = cleanup_auto_inc_dec (XVECEXP (src, i, j), after, mem_mode);
2271
      }
2272
 
2273
  return x;
2274
}
2275
 
2276
/* Auxiliary data structure for propagate_for_debug_stmt.  */
2277
 
2278
struct rtx_subst_pair
2279
{
2280
  rtx to;
2281
  bool adjusted;
2282
  bool after;
2283
};
2284
 
2285
/* DATA points to an rtx_subst_pair.  Return the value that should be
2286
   substituted.  */
2287
 
2288
static rtx
2289
propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
2290
{
2291
  struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;
2292
 
2293
  if (!rtx_equal_p (from, old_rtx))
2294
    return NULL_RTX;
2295
  if (!pair->adjusted)
2296
    {
2297
      pair->adjusted = true;
2298
      pair->to = cleanup_auto_inc_dec (pair->to, pair->after, VOIDmode);
2299
      return pair->to;
2300
    }
2301
  return copy_rtx (pair->to);
2302
}
2303
#endif
2304
 
2305
/* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN
2306
   and LAST.  If MOVE holds, debug insns must also be moved past
2307
   LAST.  */
2308
 
2309
static void
2310
propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src, bool move)
2311
{
2312
  rtx next, move_pos = move ? last : NULL_RTX, loc;
2313
 
2314
#ifdef AUTO_INC_DEC
2315
  struct rtx_subst_pair p;
2316
  p.to = src;
2317
  p.adjusted = false;
2318
  p.after = move;
2319
#endif
2320
 
2321
  next = NEXT_INSN (insn);
2322
  while (next != last)
2323
    {
2324
      insn = next;
2325
      next = NEXT_INSN (insn);
2326
      if (DEBUG_INSN_P (insn))
2327
        {
2328
#ifdef AUTO_INC_DEC
2329
          loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
2330
                                         dest, propagate_for_debug_subst, &p);
2331
#else
2332
          loc = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), dest, src);
2333
#endif
2334
          if (loc == INSN_VAR_LOCATION_LOC (insn))
2335
            continue;
2336
          INSN_VAR_LOCATION_LOC (insn) = loc;
2337
          if (move_pos)
2338
            {
2339
              remove_insn (insn);
2340
              PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX;
2341
              move_pos = emit_debug_insn_after (insn, move_pos);
2342
            }
2343
          else
2344
            df_insn_rescan (insn);
2345
        }
2346
    }
2347
}
2348
 
2349
/* Delete the unconditional jump INSN and adjust the CFG correspondingly.
2350
   Note that the INSN should be deleted *after* removing dead edges, so
2351
   that the kept edge is the fallthrough edge for a (set (pc) (pc))
2352
   but not for a (set (pc) (label_ref FOO)).  */
2353
 
2354
static void
2355
update_cfg_for_uncondjump (rtx insn)
2356
{
2357
  basic_block bb = BLOCK_FOR_INSN (insn);
2358
  bool at_end = (BB_END (bb) == insn);
2359
 
2360
  if (at_end)
2361
    purge_dead_edges (bb);
2362
 
2363
  delete_insn (insn);
2364
  if (at_end && EDGE_COUNT (bb->succs) == 1)
2365
    single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2366
}
2367
 
2368
 
2369
/* Try to combine the insns I1 and I2 into I3.
2370
   Here I1 and I2 appear earlier than I3.
2371
   I1 can be zero; then we combine just I2 into I3.
2372
 
2373
   If we are combining three insns and the resulting insn is not recognized,
2374
   try splitting it into two insns.  If that happens, I2 and I3 are retained
2375
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
2376
   are pseudo-deleted.
2377
 
2378
   Return 0 if the combination does not work.  Then nothing is changed.
2379
   If we did the combination, return the insn at which combine should
2380
   resume scanning.
2381
 
2382
   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
2383
   new direct jump instruction.  */
2384
 
2385
static rtx
2386
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
2387
{
2388
  /* New patterns for I3 and I2, respectively.  */
2389
  rtx newpat, newi2pat = 0;
2390
  rtvec newpat_vec_with_clobbers = 0;
2391
  int substed_i2 = 0, substed_i1 = 0;
2392
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
2393
  int added_sets_1, added_sets_2;
2394
  /* Total number of SETs to put into I3.  */
2395
  int total_sets;
2396
  /* Nonzero if I2's body now appears in I3.  */
2397
  int i2_is_used;
2398
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
2399
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
2400
  /* Contains I3 if the destination of I3 is used in its source, which means
2401
     that the old life of I3 is being killed.  If that usage is placed into
2402
     I2 and not in I3, a REG_DEAD note must be made.  */
2403
  rtx i3dest_killed = 0;
2404
  /* SET_DEST and SET_SRC of I2 and I1.  */
2405
  rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0;
2406
  /* Set if I2DEST was reused as a scratch register.  */
2407
  bool i2scratch = false;
2408
  /* PATTERN (I1) and PATTERN (I2), or copies of them in certain cases.  */
2409
  rtx i1pat = 0, i2pat = 0;
2410
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
2411
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
2412
  int i2dest_killed = 0, i1dest_killed = 0;
2413
  int i1_feeds_i3 = 0;
2414
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
2415
  rtx new_i3_notes, new_i2_notes;
2416
  /* Set if we substituted I3 into I2 instead of the normal case.  */
2417
  int i3_subst_into_i2 = 0;
2418
  /* Set if I1, I2 or I3 contains a MULT operation.  */
2419
  int have_mult = 0;
2420
  int swap_i2i3 = 0;
2421
  int changed_i3_dest = 0;
2422
 
2423
  int maxreg;
2424
  rtx temp;
2425
  rtx link;
2426
  rtx other_pat = 0;
2427
  rtx new_other_notes;
2428
  int i;
2429
 
2430
  /* Exit early if one of the insns involved can't be used for
2431
     combinations.  */
2432
  if (cant_combine_insn_p (i3)
2433
      || cant_combine_insn_p (i2)
2434
      || (i1 && cant_combine_insn_p (i1))
2435
      || likely_spilled_retval_p (i3))
2436
    return 0;
2437
 
2438
  combine_attempts++;
2439
  undobuf.other_insn = 0;
2440
 
2441
  /* Reset the hard register usage information.  */
2442
  CLEAR_HARD_REG_SET (newpat_used_regs);
2443
 
2444
  if (dump_file && (dump_flags & TDF_DETAILS))
2445
    {
2446
      if (i1)
2447
        fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
2448
                 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
2449
      else
2450
        fprintf (dump_file, "\nTrying %d -> %d:\n",
2451
                 INSN_UID (i2), INSN_UID (i3));
2452
    }
2453
 
2454
  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
2455
     code below, set I1 to be the earlier of the two insns.  */
2456
  if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
2457
    temp = i1, i1 = i2, i2 = temp;
2458
 
2459
  added_links_insn = 0;
2460
 
2461
  /* First check for one important special-case that the code below will
2462
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
2463
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
2464
     we may be able to replace that destination with the destination of I3.
2465
     This occurs in the common code where we compute both a quotient and
2466
     remainder into a structure, in which case we want to do the computation
2467
     directly into the structure to avoid register-register copies.
2468
 
2469
     Note that this case handles both multiple sets in I2 and also
2470
     cases where I2 has a number of CLOBBERs or PARALLELs.
2471
 
2472
     We make very conservative checks below and only try to handle the
2473
     most common cases of this.  For example, we only handle the case
2474
     where I2 and I3 are adjacent to avoid making difficult register
2475
     usage tests.  */
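  /* A hypothetical instance of this shape:
         I2: (parallel [(set (reg:SI 65) (div:SI (reg:SI 60) (reg:SI 61)))
                        (set (reg:SI 66) (mod:SI (reg:SI 60) (reg:SI 61)))])
         I3: (set (mem:SI (reg:SI 70)) (reg:SI 66))
     If (reg:SI 66) dies in I3, the MOD result can be computed directly
     into the memory destination and the copy insn deleted.  */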
2476
 
2477
  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
2478
      && REG_P (SET_SRC (PATTERN (i3)))
2479
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
2480
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
2481
      && GET_CODE (PATTERN (i2)) == PARALLEL
2482
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
2483
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
2484
         below would need to check what is inside (and reg_overlap_mentioned_p
2485
         doesn't support those codes anyway).  Don't allow those destinations;
2486
         the resulting insn isn't likely to be recognized anyway.  */
2487
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
2488
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
2489
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
2490
                                    SET_DEST (PATTERN (i3)))
2491
      && next_active_insn (i2) == i3)
2492
    {
2493
      rtx p2 = PATTERN (i2);
2494
 
2495
      /* Make sure that the destination of I3,
2496
         which we are going to substitute into one output of I2,
2497
         is not used within another output of I2.  We must avoid making this:
2498
         (parallel [(set (mem (reg 69)) ...)
2499
                    (set (reg 69) ...)])
2500
         which is not well-defined as to order of actions.
2501
         (Besides, reload can't handle output reloads for this.)
2502
 
2503
         The problem can also happen if the dest of I3 is a memory ref,
2504
         if another dest in I2 is an indirect memory ref.  */
2505
      for (i = 0; i < XVECLEN (p2, 0); i++)
2506
        if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2507
             || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2508
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
2509
                                        SET_DEST (XVECEXP (p2, 0, i))))
2510
          break;
2511
 
2512
      if (i == XVECLEN (p2, 0))
2513
        for (i = 0; i < XVECLEN (p2, 0); i++)
2514
          if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
2515
               || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
2516
              && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
2517
            {
2518
              combine_merges++;
2519
 
2520
              subst_insn = i3;
2521
              subst_low_luid = DF_INSN_LUID (i2);
2522
 
2523
              added_sets_2 = added_sets_1 = 0;
2524
              i2src = SET_DEST (PATTERN (i3));
2525
              i2dest = SET_SRC (PATTERN (i3));
2526
              i2dest_killed = dead_or_set_p (i2, i2dest);
2527
 
2528
              /* Replace the dest in I2 with our dest and make the resulting
2529
                 insn the new pattern for I3.  Then skip to where we
2530
                 validate the pattern.  Everything was set up above.  */
2531
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
2532
                     SET_DEST (PATTERN (i3)));
2533
 
2534
              newpat = p2;
2535
              i3_subst_into_i2 = 1;
2536
              goto validate_replacement;
2537
            }
2538
    }
2539
 
2540
  /* If I2 is setting a pseudo to a constant and I3 is setting some
2541
     sub-part of it to another constant, merge them by making a new
2542
     constant.  */
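  /* Illustrative case (little-endian byte order assumed):
         I2: (set (reg:SI 65) (const_int 0))
         I3: (set (strict_low_part (subreg:QI (reg:SI 65) 0)) (const_int 42))
     merges into the single insn (set (reg:SI 65) (const_int 42)).  */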
2543
  if (i1 == 0
2544
      && (temp = single_set (i2)) != 0
2545
      && (CONST_INT_P (SET_SRC (temp))
2546
          || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
2547
      && GET_CODE (PATTERN (i3)) == SET
2548
      && (CONST_INT_P (SET_SRC (PATTERN (i3)))
2549
          || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
2550
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
2551
    {
2552
      rtx dest = SET_DEST (PATTERN (i3));
2553
      int offset = -1;
2554
      int width = 0;
2555
 
2556
      if (GET_CODE (dest) == ZERO_EXTRACT)
2557
        {
2558
          if (CONST_INT_P (XEXP (dest, 1))
2559
              && CONST_INT_P (XEXP (dest, 2)))
2560
            {
2561
              width = INTVAL (XEXP (dest, 1));
2562
              offset = INTVAL (XEXP (dest, 2));
2563
              dest = XEXP (dest, 0);
2564
              if (BITS_BIG_ENDIAN)
2565
                offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
2566
            }
2567
        }
2568
      else
2569
        {
2570
          if (GET_CODE (dest) == STRICT_LOW_PART)
2571
            dest = XEXP (dest, 0);
2572
          width = GET_MODE_BITSIZE (GET_MODE (dest));
2573
          offset = 0;
2574
        }
2575
 
2576
      if (offset >= 0)
2577
        {
2578
          /* If this is the low part, we're done.  */
2579
          if (subreg_lowpart_p (dest))
2580
            ;
2581
          /* Handle the case where inner is twice the size of outer.  */
2582
          else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
2583
                   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
2584
            offset += GET_MODE_BITSIZE (GET_MODE (dest));
2585
          /* Otherwise give up for now.  */
2586
          else
2587
            offset = -1;
2588
        }
2589
 
2590
      if (offset >= 0
2591
          && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
2592
              <= HOST_BITS_PER_WIDE_INT * 2))
2593
        {
2594
          HOST_WIDE_INT mhi, ohi, ihi;
2595
          HOST_WIDE_INT mlo, olo, ilo;
2596
          rtx inner = SET_SRC (PATTERN (i3));
2597
          rtx outer = SET_SRC (temp);
2598
 
2599
          if (CONST_INT_P (outer))
2600
            {
2601
              olo = INTVAL (outer);
2602
              ohi = olo < 0 ? -1 : 0;
2603
            }
2604
          else
2605
            {
2606
              olo = CONST_DOUBLE_LOW (outer);
2607
              ohi = CONST_DOUBLE_HIGH (outer);
2608
            }
2609
 
2610
          if (CONST_INT_P (inner))
2611
            {
2612
              ilo = INTVAL (inner);
2613
              ihi = ilo < 0 ? -1 : 0;
2614
            }
2615
          else
2616
            {
2617
              ilo = CONST_DOUBLE_LOW (inner);
2618
              ihi = CONST_DOUBLE_HIGH (inner);
2619
            }
2620
 
2621
          if (width < HOST_BITS_PER_WIDE_INT)
2622
            {
2623
              mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
2624
              mhi = 0;
2625
            }
2626
          else if (width < HOST_BITS_PER_WIDE_INT * 2)
2627
            {
2628
              mhi = ((unsigned HOST_WIDE_INT) 1
2629
                     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
2630
              mlo = -1;
2631
            }
2632
          else
2633
            {
2634
              mlo = -1;
2635
              mhi = -1;
2636
            }
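          /* E.g. with a 64-bit HOST_WIDE_INT and width == 8, this leaves
             mhi == 0 and mlo == 0xff, selecting just the low byte.  */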
2637
 
2638
          ilo &= mlo;
2639
          ihi &= mhi;
2640
 
2641
          if (offset >= HOST_BITS_PER_WIDE_INT)
2642
            {
2643
              mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
2644
              mlo = 0;
2645
              ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
2646
              ilo = 0;
2647
            }
2648
          else if (offset > 0)
2649
            {
2650
              mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
2651
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
2652
              mlo = mlo << offset;
2653
              ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
2654
                                       >> (HOST_BITS_PER_WIDE_INT - offset));
2655
              ilo = ilo << offset;
2656
            }
2657
 
2658
          olo = (olo & ~mlo) | ilo;
2659
          ohi = (ohi & ~mhi) | ihi;
2660
 
2661
          combine_merges++;
2662
          subst_insn = i3;
2663
          subst_low_luid = DF_INSN_LUID (i2);
2664
          added_sets_2 = added_sets_1 = 0;
2665
          i2dest = SET_DEST (temp);
2666
          i2dest_killed = dead_or_set_p (i2, i2dest);
2667
 
2668
          /* Replace the source in I2 with the new constant and make the
2669
             resulting insn the new pattern for I3.  Then skip to where we
2670
             validate the pattern.  Everything was set up above.  */
2671
          SUBST (SET_SRC (temp),
2672
                 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));
2673
 
2674
          newpat = PATTERN (i2);
2675
 
2676
          /* The dest of I3 has been replaced with the dest of I2.  */
2677
          changed_i3_dest = 1;
2678
          goto validate_replacement;
2679
        }
2680
    }
2681
 
2682
#ifndef HAVE_cc0
2683
  /* If we have no I1 and I2 looks like:
2684
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
2685
                   (set Y OP)])
2686
     make up a dummy I1 that is
2687
        (set Y OP)
2688
     and change I2 to be
2689
        (set (reg:CC X) (compare:CC Y (const_int 0)))
2690
 
2691
     (We can ignore any trailing CLOBBERs.)
2692
 
2693
     This undoes a previous combination and allows us to match a branch-and-
2694
     decrement insn.  */
2695
 
2696
  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
2697
      && XVECLEN (PATTERN (i2), 0) >= 2
2698
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
2699
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
2700
          == MODE_CC)
2701
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
2702
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
2703
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
2704
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
2705
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
2706
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
2707
    {
2708
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
2709
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
2710
          break;
2711
 
2712
      if (i == 1)
2713
        {
2714
          /* We make I1 with the same INSN_UID as I2.  This gives it
2715
             the same DF_INSN_LUID for value tracking.  Our fake I1 will
2716
             never appear in the insn stream so giving it the same INSN_UID
2717
             as I2 will not cause a problem.  */
2718
 
2719
          i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
2720
                             BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
2721
                             XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX);
2722
 
2723
          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
2724
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
2725
                 SET_DEST (PATTERN (i1)));
2726
        }
2727
    }
2728
#endif
2729
 
2730
  /* Verify that I2 and I1 are valid for combining.  */
2731
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
2732
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
2733
    {
2734
      undo_all ();
2735
      return 0;
2736
    }
2737
 
2738
  /* Record whether I2DEST is used in I2SRC and similarly for the other
2739
     cases.  Knowing this will help in register status updating below.  */
2740
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
2741
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
2742
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
2743
  i2dest_killed = dead_or_set_p (i2, i2dest);
2744
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);
2745
 
2746
  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
2747
     in I2SRC.  */
2748
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
2749
 
2750
  /* Ensure that I3's pattern can be the destination of combines.  */
2751
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
2752
                          i1 && i2dest_in_i1src && i1_feeds_i3,
2753
                          &i3dest_killed))
2754
    {
2755
      undo_all ();
2756
      return 0;
2757
    }
2758
 
2759
  /* See if any of the insns is a MULT operation.  Unless one is, we will
2760
     reject a combination that is one, since it must be slower.  Be conservative
2761
     here.  */
2762
  if (GET_CODE (i2src) == MULT
2763
      || (i1 != 0 && GET_CODE (i1src) == MULT)
2764
      || (GET_CODE (PATTERN (i3)) == SET
2765
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
2766
    have_mult = 1;
2767
 
2768
  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
2769
     We used to do this EXCEPT in one case: I3 has a post-inc in an
2770
     output operand.  However, that exception can give rise to insns like
2771
        mov r3,(r3)+
2772
     which is a famous insn on the PDP-11 where the value of r3 used as the
2773
     source was model-dependent.  Avoid this sort of thing.  */
2774
 
2775
#if 0
2776
  if (!(GET_CODE (PATTERN (i3)) == SET
2777
        && REG_P (SET_SRC (PATTERN (i3)))
2778
        && MEM_P (SET_DEST (PATTERN (i3)))
2779
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
2780
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
2781
    /* It's not the exception.  */
2782
#endif
2783
#ifdef AUTO_INC_DEC
2784
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
2785
      if (REG_NOTE_KIND (link) == REG_INC
2786
          && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
2787
              || (i1 != 0
2788
                  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
2789
        {
2790
          undo_all ();
2791
          return 0;
2792
        }
2793
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
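
  /* [Editorial sketch -- not part of combine.c; compiled out.]  The
     decision just made, restated on plain booleans: the I1 set must be
     kept (added back) exactly when its value can still be live past
     the combination, which depends on whether I1 feeds I3 directly or
     through I2.  */
#if 0
static int
sketch_must_keep_i1_set (int i1_feeds_i3,
                         int i1dest_dead_or_set_in_i3,
                         int i1dest_dead_or_set_in_i2)
{
  if (i1_feeds_i3)
    return !i1dest_dead_or_set_in_i3;
  return !(i1dest_dead_or_set_in_i3 || i1dest_dead_or_set_in_i2);
}
#endif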

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
        i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
      else
        i2pat = copy_rtx (PATTERN (i2));
    }

  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
        i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
      else
        i1pat = copy_rtx (PATTERN (i1));
    }

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
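
  /* [Editorial sketch -- not part of combine.c; compiled out.]  In C
     terms, the pattern being preserved is one operation serving two
     consumers: the arithmetic result and its comparison against zero.
     A machine "subtract and set flags" insn computes both at once,
     which is what the PARALLEL of a COMPARE and a SET expresses.  */
#if 0
static int
sketch_op_and_flags (int x, int y, int *result)
{
  *result = x - y;              /* the SET of the pseudo */
  return *result == 0;          /* the COMPARE against const0_rtx */
}
#endif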

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef SELECT_CC_MODE
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef SELECT_CC_MODE
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          if (can_change_dest_mode (SET_DEST (newpat), added_sets_2,
                                    compare_mode))
            {
              unsigned int regno = REGNO (SET_DEST (newpat));
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      /* It is possible that the source of I2 or I1 may be performing
         an unneeded operation, such as a ZERO_EXTEND of something
         that is known to have the high part zero.  Handle that case
         by letting subst look at the innermost one of them.

         Another way to do this would be to have a function that tries
         to simplify a single insn instead of merging two or more
         insns.  We don't do this because of the potential of infinite
         loops and because of the potential extra memory required.
         However, doing it the way we are is a bit of a kludge and
         doesn't catch all cases.

         But only do this if -fexpensive-optimizations since it slows
         things down and doesn't usually win.

         This is not done in the COMPARE case above because the
         unmodified I2PAT is used in the PARALLEL and so a pattern
         with a modified I2SRC would not match.  */

      if (flag_expensive_optimizations)
        {
          /* Pass pc_rtx so no substitutions are done, just
             simplifications.  */
          if (i1)
            {
              subst_low_luid = DF_INSN_LUID (i1);
              i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
            }
          else
            {
              subst_low_luid = DF_INSN_LUID (i2);
              i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
            }
        }

      n_occurrences = 0;         /* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_luid = DF_INSN_LUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Check that an autoincrement side-effect on I1 has not been lost.
         This happens if I1DEST is mentioned in I2 and dies there, and
         has disappeared from the new pattern.  */
      if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
           && !i1_feeds_i3
           && dead_or_set_p (i2, i1dest)
           && !reg_overlap_mentioned_p (i1dest, newpat))
          /* Before we can do this substitution, we must redo the test done
             above (see detailed comments there) that ensures that I1DEST
             isn't mentioned in any SETs in NEWPAT that are field
             assignments.  */
          || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, 0, 0))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
                  sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets) = i1pat;

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }
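
  /* [Editorial sketch -- not part of combine.c; compiled out.]  The
     vector just built is filled from the back: TOTAL_SETS starts past
     the end and is pre-decremented for each kept set, so the original
     pattern(s) stay at the front and the re-added sets land in the
     tail slots, mirroring the --total_sets indexing above.  */
#if 0
static void
sketch_tail_fill (int *vec, int total_sets, int add1, int add2)
{
  if (add1)
    vec[--total_sets] = 1;      /* slot for i1pat */
  if (add2)
    vec[--total_sets] = 2;      /* slot for i2pat */
}
#endif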

 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);

      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
        RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and isn't
     marked as an instruction that might trap in an EH region.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.

     It's pointless doing this if we originally had two sets, one from
     i3, and one from i2.  Combining then splitting the parallel results
     in the original i2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.

     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */

  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);

      if (((REG_P (SET_DEST (set1))
            && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
           || (GET_CODE (SET_DEST (set1)) == SUBREG
               && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
          && insn_nothrow_p (i3)
          && !side_effects_p (SET_SRC (set1)))
        {
          newpat = set0;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }

      else if (((REG_P (SET_DEST (set0))
                 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
                || (GET_CODE (SET_DEST (set0)) == SUBREG
                    && find_reg_note (i3, REG_UNUSED,
                                      SUBREG_REG (SET_DEST (set0)))))
               && insn_nothrow_p (i3)
               && !side_effects_p (SET_SRC (set0)))
        {
          newpat = set1;
          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

          if (insn_code_number >= 0)
            changed_i3_dest = 1;
        }
    }
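
  /* [Editorial sketch -- not part of combine.c; compiled out.]  A
     divmod insn yields a quotient and a remainder in one pattern; when
     one destination carries a REG_UNUSED note, only the other SET need
     survive, as the code above arranges.  In C terms:  */
#if 0
static int
sketch_divmod_quotient_only (int a, int b)
{
  int quot = a / b;
  int rem = a % b;              /* dead: nothing reads it (REG_UNUSED) */
  (void) rem;                   /* so only the quotient SET is kept */
  return quot;
}
#endif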

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx parallel, m_split, *split;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = combine_split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
        {
          enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));

          /* First try to split using the original register as a
             scratch register.  */
          parallel = gen_rtx_PARALLEL (VOIDmode,
                                       gen_rtvec (2, newpat,
                                                  gen_rtx_CLOBBER (VOIDmode,
                                                                   i2dest)));
          m_split = combine_split_insns (parallel, i3);

          /* If that didn't work, try changing the mode of I2DEST if
             we can.  */
          if (m_split == 0
              && new_mode != GET_MODE (i2dest)
              && new_mode != VOIDmode
              && can_change_dest_mode (i2dest, added_sets_2, new_mode))
            {
              enum machine_mode old_mode = GET_MODE (i2dest);
              rtx ni2dest;

              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
                  ni2dest = regno_reg_rtx[REGNO (i2dest)];
                }

              parallel = (gen_rtx_PARALLEL
                          (VOIDmode,
                           gen_rtvec (2, newpat,
                                      gen_rtx_CLOBBER (VOIDmode,
                                                       ni2dest))));
              m_split = combine_split_insns (parallel, i3);

              if (m_split == 0
                  && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                {
                  struct undo *buf;

                  adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
                  buf = undobuf.undos;
                  undobuf.undos = buf->next;
                  buf->next = undobuf.frees;
                  undobuf.frees = buf;
                }
            }

          i2scratch = m_split != 0;
        }

      /* If recog_for_combine has discarded clobbers, try to use them
         again for the split.  */
      if (m_split == 0 && newpat_vec_with_clobbers)
        {
          parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
          m_split = combine_split_insns (parallel, i3);
        }

      if (m_split && NEXT_INSN (m_split) == NULL_RTX)
        {
          m_split = PATTERN (m_split);
          insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = m_split;
        }
      else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
               && (next_real_insn (i2) == i3
                   || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (NEXT_INSN (m_split));
          newi2pat = PATTERN (m_split);

          i3set = single_set (NEXT_INSN (m_split));
          i2set = single_set (m_split);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3,
                                                  &new_i3_notes);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (REG_P (new_i3_dest)
                  && REG_P (new_i2_dest)
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                INC_REG_N_SETS (REGNO (new_i2_dest), 1);
            }
        }

      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && REG_P (i2dest)
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.
             Make sure we don't change a hard register to have a mode that
             isn't valid for it, or change the number of registers.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || can_change_dest_mode (i2dest, added_sets_2,
                                       GET_MODE (*split)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);
          bool subst_done = false;
          newi2pat = NULL_RTX;

          i2scratch = true;

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
                newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
              else
                {
                  SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
                  newdest = regno_reg_rtx[REGNO (i2dest)];
                }
            }

          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
          if (split_code == MULT
              && CONST_INT_P (XEXP (*split, 1))
              && INTVAL (XEXP (*split, 1)) > 0
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_ASHIFT (split_mode,
                                             XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }
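
          /* [Editorial sketch -- not part of combine.c; compiled out.
             The helper below is a hypothetical stand-in for GCC's
             exact_log2.]  The rewrite above relies on x * 2**n == x << n:
             for a positive power-of-two constant, exact_log2 yields the
             shift count and -1 otherwise.  */
#if 0
static int
sketch_exact_log2 (unsigned long long x)
{
  int n = 0;
  if (x == 0 || (x & (x - 1)) != 0)
    return -1;                  /* zero or not a power of two */
  while ((x >>= 1) != 0)
    n++;
  return n;
}
/* e.g. sketch_exact_log2 (8) == 3, and y * 8 == y << 3 for unsigned y,
   which is why (mult FOO (const_int 8)) becomes (ashift FOO 3).  */
#endif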

#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
            {
#ifdef LOAD_EXTEND_OP
              /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
                 what it really is.  */
              if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
                  == SIGN_EXTEND)
                SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
              else
#endif
                SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
                                                    SUBREG_REG (*split)));
            }
#endif

          /* Attempt to split binary operators using arithmetic identities.  */
          if (BINARY_P (SET_SRC (newpat))
              && split_mode == GET_MODE (SET_SRC (newpat))
              && ! side_effects_p (SET_SRC (newpat)))
            {
              rtx setsrc = SET_SRC (newpat);
              enum machine_mode mode = GET_MODE (setsrc);
              enum rtx_code code = GET_CODE (setsrc);
              rtx src_op0 = XEXP (setsrc, 0);
              rtx src_op1 = XEXP (setsrc, 1);

              /* Split "X = Y op Y" as "Z = Y; X = Z op Z".  */
              if (rtx_equal_p (src_op0, src_op1))
                {
                  newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
                  SUBST (XEXP (setsrc, 0), newdest);
                  SUBST (XEXP (setsrc, 1), newdest);
                  subst_done = true;
                }
              /* Split "((P op Q) op R) op S" where op is PLUS or MULT.  */
              else if ((code == PLUS || code == MULT)
                       && GET_CODE (src_op0) == code
                       && GET_CODE (XEXP (src_op0, 0)) == code
                       && (INTEGRAL_MODE_P (mode)
                           || (FLOAT_MODE_P (mode)
                               && flag_unsafe_math_optimizations)))
                {
                  rtx p = XEXP (XEXP (src_op0, 0), 0);
                  rtx q = XEXP (XEXP (src_op0, 0), 1);
                  rtx r = XEXP (src_op0, 1);
                  rtx s = src_op1;

                  /* Split both "((X op Y) op X) op Y" and
                     "((X op Y) op Y) op X" as "T op T" where T is
                     "X op Y".  */
                  if ((rtx_equal_p (p, r) && rtx_equal_p (q, s))
                       || (rtx_equal_p (p, s) && rtx_equal_p (q, r)))
                    {
                      newi2pat = gen_rtx_SET (VOIDmode, newdest,
                                              XEXP (src_op0, 0));
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                  /* Split "((X op X) op Y) op Y" as "T op T" where
                     T is "X op Y".  */
                  else if (rtx_equal_p (p, q) && rtx_equal_p (r, s))
                    {
                      rtx tmp = simplify_gen_binary (code, mode, p, r);
                      newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
                      SUBST (XEXP (setsrc, 0), newdest);
                      SUBST (XEXP (setsrc, 1), newdest);
                      subst_done = true;
                    }
                }
            }
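
          /* [Editorial sketch -- not part of combine.c; compiled out.]
             The identities exploited above, checked on unsigned ints
             (for which wrap-around makes the reassociation exact):  */
#if 0
#include <assert.h>

static void
sketch_check_identities (unsigned x, unsigned y)
{
  unsigned t = x + y;
  assert (((x + y) + x) + y == t + t);  /* "((X op Y) op X) op Y" */
  assert (((x + y) + y) + x == t + t);  /* "((X op Y) op Y) op X" */
  assert (((x + x) + y) + y == t + t);  /* "((X op X) op Y) op Y" */

  t = x * y;
  assert (((x * y) * x) * y == t * t);  /* same shapes for MULT */
  assert (((x * x) * y) * y == t * t);
}
#endif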

          if (!subst_done)
            {
              newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
              SUBST (*split, newdest);
            }

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                {
                  rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                  if (reg_overlap_mentioned_p (reg, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
                }

          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the first assignment is a
     condition code register or cc0.  We eliminate this case by making sure
     the SET_DEST and SET_SRC have the same mode.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
               == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (REG_P (temp)
                  && VEC_index (reg_stat_type, reg_stat,
                                REGNO (temp))->nonzero_bits != 0
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (VEC_index (reg_stat_type, reg_stat,
                                 REGNO (temp))->nonzero_bits
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (REG_P (temp)
                      && VEC_index (reg_stat_type, reg_stat,
                                    REGNO (temp))->nonzero_bits != 0
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (VEC_index (reg_stat_type, reg_stat,
                                     REGNO (temp))->nonzero_bits
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
        swap_i2i3 = 1;
    }
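
  /* [Editorial sketch -- not part of combine.c; compiled out.]  The
     case just handled, in C terms: instead of reading the same memory
     twice (once sign-extended, once narrow), load once and copy
     between registers, letting register allocation kill the copy:  */
#if 0
static void
sketch_split_extended_load (const short *mem, int *wide, short *narrow)
{
  int t = *mem;                 /* newi2pat: one sign-extending load */
  *wide = t;
  *narrow = (short) t;          /* new i3: register-register copy */
}
#endif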

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   DF_INSN_LUID (i2))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1))
           && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
                 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
#ifdef HAVE_cc0
           /* We cannot split the parallel into two sets if both sets
              reference cc0.  */
           && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
                 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
#endif
           )
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
        {
          /* recog_for_combine might have added CLOBBERs to newi2pat.
             Make sure NEWPAT does not depend on the clobbered regs.  */
          if (GET_CODE (newi2pat) == PARALLEL)
            {
              for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
                if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                  {
                    rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                    if (reg_overlap_mentioned_p (reg, newpat))
                      break;
                  }

              if (i >= 0)
                {
                  /* CLOBBERs on newi2pat prevent it going first.
                     Try the other order of the insns if possible.  */
                  temp = newpat;
                  newpat = XVECEXP (newi2pat, 0, 0);
                  newi2pat = temp;
#ifdef HAVE_cc0
                  if (reg_referenced_p (cc0_rtx, newpat))
                    {
                      undo_all ();
                      return 0;
                    }
#endif

                  i2_code_number = recog_for_combine (&newi2pat, i2,
                                                      &new_i2_notes);
                  if (i2_code_number < 0)
                    {
                      undo_all ();
                      return 0;
                    }

                  if (GET_CODE (newi2pat) == PARALLEL)
                    for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
                      if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
                        {
                          rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
                          if (reg_overlap_mentioned_p (reg, newpat))
                            {
                              undo_all ();
                              return 0;
                            }
                        }
                }
            }

          insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
        }
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_pat = PATTERN (undobuf.other_insn);
      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
                                             &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }
    }

#ifdef HAVE_cc0
  /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
     they are adjacent to each other or not.  */
  {
    rtx p = prev_nonnote_insn (i3);
    if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
        && sets_cc0_p (newi2pat))
      {
        undo_all ();
        return 0;
      }
  }
#endif

  /* Only allow this combination if insn_rtx_costs reports that the
     replacement instructions are cheaper than the originals.  */
  if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat, other_pat))
    {
      undo_all ();
      return 0;
    }

  if (MAY_HAVE_DEBUG_INSNS)
    {
      struct undo *undo;

      for (undo = undobuf.undos; undo; undo = undo->next)
        if (undo->kind == UNDO_MODE)
          {
            rtx reg = *undo->where.r;
            enum machine_mode new_mode = GET_MODE (reg);
            enum machine_mode old_mode = undo->old_contents.m;

            /* Temporarily revert mode back.  */
            adjust_reg_mode (reg, old_mode);

            if (reg == i2dest && i2scratch)
              {
                /* If we used i2dest as a scratch register with a
                   different mode, substitute it for the original
                   i2src while its original mode is temporarily
                   restored, and then clear i2scratch so that we don't
                   do it again later.  */
                propagate_for_debug (i2, i3, reg, i2src, false);
                i2scratch = false;
                /* Put back the new mode.  */
                adjust_reg_mode (reg, new_mode);
              }
            else
              {
                rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
                rtx first, last;

                if (reg == i2dest)
                  {
                    first = i2;
                    last = i3;
                  }
                else
                  {
                    first = i3;
                    last = undobuf.other_insn;
                    gcc_assert (last);
                  }

                /* We're dealing with a reg that changed mode but not
                   meaning, so we want to turn it into a subreg for
                   the new mode.  However, because of REG sharing and
                   because its mode had already changed, we have to do
                   it in two steps.  First, replace any debug uses of
                   reg, with its original mode temporarily restored,
                   with this copy we have created; then, replace the
                   copy with the SUBREG of the original shared reg,
                   once again changed to the new mode.  */
                propagate_for_debug (first, last, reg, tempreg, false);
                adjust_reg_mode (reg, new_mode);
                propagate_for_debug (first, last, tempreg,
                                     lowpart_subreg (old_mode, reg, new_mode),
                                     false);
              }
          }
    }

  /* If we will be able to accept this, we have made a
     change to the destination of I3.  This requires us to
     do a few adjustments.  */

  if (changed_i3_dest)
    {
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  if (undobuf.other_insn)
    {
      rtx note, next;

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            remove_note (undobuf.other_insn, note);
        }

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  if (swap_i2i3)
    {
      rtx insn;
      rtx link;
      rtx ni2dest;

      /* I3 now uses what used to be its destination and which is now
         I2's destination.  This requires us to do a few adjustments.  */
      PATTERN (i3) = newpat;
      adjust_for_new_dest (i3);

      /* We need a LOG_LINK from I3 to I2.  But we used to have one,
         so we still will.

         However, some later insn might be using I2's dest and have
         a LOG_LINK pointing at I3.  We must remove this link.
         The simplest way to remove the link is to point it at I1,
         which we know will be a NOTE.  */

      /* newi2pat is usually a SET here; however, recog_for_combine might
         have added some clobbers.  */
      if (GET_CODE (newi2pat) == PARALLEL)
        ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
      else
        ni2dest = SET_DEST (newi2pat);

      for (insn = NEXT_INSN (i3);
           insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                    || insn != BB_HEAD (this_basic_block->next_bb));
           insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
            {
              for (link = LOG_LINKS (insn); link;
                   link = XEXP (link, 1))
                if (XEXP (link, 0) == i3)
                  XEXP (link, 0) = i1;

              break;
            }
        }
    }
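
  /* [Editorial sketch -- not part of combine.c; compiled out.]  The
     loop above rewrites a LOG_LINK in place: any later insn whose link
     pointed at I3 is redirected to I1 (now a NOTE), the singly-linked
     list analogue of:  */
#if 0
struct sketch_link { void *insn; struct sketch_link *next; };

static void
sketch_retarget_links (struct sketch_link *links, void *from, void *to)
{
  for (; links; links = links->next)
    if (links->insn == from)
      links->insn = to;
}
#endif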

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    unsigned int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   || !i2dest_killed
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   || !i1dest_killed
                   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;

    if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
      {
        rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);

        reset_used_flags (call_usage);
        call_usage = copy_rtx (call_usage);

        if (substed_i2)
          replace_rtx (call_usage, i2dest, i2src);

        if (substed_i1)
          replace_rtx (call_usage, i1dest, i1src);

        CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
      }

    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
               || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
              && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                          || BB_HEAD (this_basic_block) != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && INSN_P (temp))
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        if (MAY_HAVE_DEBUG_INSNS && i2scratch)
          propagate_for_debug (i2, i3, i2dest, i2src, false);
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      {
        if (MAY_HAVE_DEBUG_INSNS && i2src)
          propagate_for_debug (i2, i3, i2dest, i2src, i3_subst_into_i2);
        SET_INSN_DELETED (i2);
      }

    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        if (MAY_HAVE_DEBUG_INSNS)
          propagate_for_debug (i1, i3, i1dest, i1src, false);
        SET_INSN_DELETED (i1);
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, DF_INSN_LUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, DF_INSN_LUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? DF_INSN_LUID (i1) : DF_INSN_LUID (i2),
                   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  */

    if (newi2pat && new_i2_notes)
      distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);

    if (new_i3_notes)
      distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
                                            NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
                                            NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }

    if (i2dest_in_i2src)
      {
        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (REG_P (i2dest))
      {
        rtx link;
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST.  If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            INC_REG_N_SETS (regno, -1);
          }
      }

    if (i1 && REG_P (i1dest))
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          INC_REG_N_SETS (regno, -1);
      }

    /* Update reg_stat[].nonzero_bits et al for any changes that may have
       been made to this insn.  The order of calls to
       set_nonzero_bits_and_sign_copies () is important, because newi2pat
       can affect the nonzero_bits of newpat.  */
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
  }

  if (undobuf.other_insn != NULL_RTX)
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying other_insn ");
          dump_insn_slim (dump_file, undobuf.other_insn);
        }
      df_insn_rescan (undobuf.other_insn);
    }

  if (i1 && !(NOTE_P (i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i1 ");
          dump_insn_slim (dump_file, i1);
        }
      df_insn_rescan (i1);
    }

  if (i2 && !(NOTE_P (i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i2 ");
          dump_insn_slim (dump_file, i2);
        }
      df_insn_rescan (i2);
    }

  if (i3 && !(NOTE_P (i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
    {
      if (dump_file)
        {
          fprintf (dump_file, "modifying insn i3 ");
          dump_insn_slim (dump_file, i3);
        }
      df_insn_rescan (i3);
    }

  /* Set new_direct_jump_p if a new return or simple jump instruction
     has been created.  Adjust the CFG accordingly.  */

  if (returnjump_p (i3) || any_uncondjump_p (i3))
    {
      *new_direct_jump_p = 1;
      mark_jump_label (PATTERN (i3), i3, 0);
      update_cfg_for_uncondjump (i3);
    }

  if (undobuf.other_insn != NULL_RTX
      && (returnjump_p (undobuf.other_insn)
          || any_uncondjump_p (undobuf.other_insn)))
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (undobuf.other_insn);
    }

  /* A noop might also need cleaning up of CFG, if it comes from the
     simplification of a jump.  */
  if (GET_CODE (newpat) == SET
      && SET_SRC (newpat) == pc_rtx
      && SET_DEST (newpat) == pc_rtx)
    {
      *new_direct_jump_p = 1;
      update_cfg_for_uncondjump (i3);
    }
4129
 
4130
  combine_successes++;
4131
  undo_commit ();
4132
 
4133
  if (added_links_insn
4134
      && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
4135
      && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
4136
    return added_links_insn;
4137
  else
4138
    return newi2pat ? i2 : i3;
4139
}
4140
 
4141
/* Undo all the modifications recorded in undobuf.  */
4142
 
4143
static void
4144
undo_all (void)
4145
{
4146
  struct undo *undo, *next;
4147
 
4148
  for (undo = undobuf.undos; undo; undo = next)
4149
    {
4150
      next = undo->next;
4151
      switch (undo->kind)
4152
        {
4153
        case UNDO_RTX:
4154
          *undo->where.r = undo->old_contents.r;
4155
          break;
4156
        case UNDO_INT:
4157
          *undo->where.i = undo->old_contents.i;
4158
          break;
4159
        case UNDO_MODE:
4160
          adjust_reg_mode (*undo->where.r, undo->old_contents.m);
4161
          break;
4162
        default:
4163
          gcc_unreachable ();
4164
        }
4165
 
4166
      undo->next = undobuf.frees;
4167
      undobuf.frees = undo;
4168
    }
4169
 
4170
  undobuf.undos = 0;
4171
}
4172
 
4173
/* We've committed to accepting the changes we made.  Move all
4174
   of the undos to the free list.  */
4175
 
4176
static void
4177
undo_commit (void)
4178
{
4179
  struct undo *undo, *next;
4180
 
4181
  for (undo = undobuf.undos; undo; undo = next)
4182
    {
4183
      next = undo->next;
4184
      undo->next = undobuf.frees;
4185
      undobuf.frees = undo;
4186
    }
4187
  undobuf.undos = 0;
4188
}
4189
 
4190
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
4191
   where we have an arithmetic expression and return that point.  LOC will
4192
   be inside INSN.
4193
 
4194
   try_combine will call this function to see if an insn can be split into
4195
   two insns.  */
4196
 
4197
static rtx *
4198
find_split_point (rtx *loc, rtx insn)
4199
{
4200
  rtx x = *loc;
4201
  enum rtx_code code = GET_CODE (x);
4202
  rtx *split;
4203
  unsigned HOST_WIDE_INT len = 0;
4204
  HOST_WIDE_INT pos = 0;
4205
  int unsignedp = 0;
4206
  rtx inner = NULL_RTX;
4207
 
4208
  /* First special-case some codes.  */
4209
  switch (code)
4210
    {
4211
    case SUBREG:
4212
#ifdef INSN_SCHEDULING
4213
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
4214
         point.  */
4215
      if (MEM_P (SUBREG_REG (x)))
4216
        return loc;
4217
#endif
4218
      return find_split_point (&SUBREG_REG (x), insn);
4219
 
4220
    case MEM:
4221
#ifdef HAVE_lo_sum
4222
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4223
         using LO_SUM and HIGH.  */
4224
      if (GET_CODE (XEXP (x, 0)) == CONST
4225
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
4226
        {
4227
          enum machine_mode address_mode
4228
            = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
4229
 
4230
          SUBST (XEXP (x, 0),
4231
                 gen_rtx_LO_SUM (address_mode,
4232
                                 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
4233
                                 XEXP (x, 0)));
4234
          return &XEXP (XEXP (x, 0), 0);
4235
        }
4236
#endif
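      /* Editorial illustration, not part of the original source: with
         HAVE_lo_sum, (mem (symbol_ref "x")) is rewritten above into
         (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))), and the
         returned split point is the HIGH term, so the high and low parts
         of the address can be computed by separate insns.  */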
4237
 
4238
      /* If we have a PLUS whose second operand is a constant and the
4239
         address is not valid, perhaps we can split it up using
4240
         the machine-specific way to split large constants.  We use
4241
         the first pseudo-reg (one of the virtual regs) as a placeholder;
4242
         it will not remain in the result.  */
4243
      if (GET_CODE (XEXP (x, 0)) == PLUS
4244
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
4245
          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4246
                                            MEM_ADDR_SPACE (x)))
4247
        {
4248
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
4249
          rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
4250
                                                      XEXP (x, 0)),
4251
                                         subst_insn);
4252
 
4253
          /* This should have produced two insns, each of which sets our
4254
             placeholder.  If the source of the second is a valid address,
4255
             we can put both sources together and make a split point
4256
             in the middle.  */
4257
 
4258
          if (seq
4259
              && NEXT_INSN (seq) != NULL_RTX
4260
              && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
4261
              && NONJUMP_INSN_P (seq)
4262
              && GET_CODE (PATTERN (seq)) == SET
4263
              && SET_DEST (PATTERN (seq)) == reg
4264
              && ! reg_mentioned_p (reg,
4265
                                    SET_SRC (PATTERN (seq)))
4266
              && NONJUMP_INSN_P (NEXT_INSN (seq))
4267
              && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
4268
              && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
4269
              && memory_address_addr_space_p
4270
                   (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
4271
                    MEM_ADDR_SPACE (x)))
4272
            {
4273
              rtx src1 = SET_SRC (PATTERN (seq));
4274
              rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
4275
 
4276
              /* Replace the placeholder in SRC2 with SRC1.  If we can
4277
                 find where in SRC2 it was placed, that can become our
4278
                 split point and we can replace this address with SRC2.
4279
                 Just try two obvious places.  */
4280
 
4281
              src2 = replace_rtx (src2, reg, src1);
4282
              split = 0;
4283
              if (XEXP (src2, 0) == src1)
4284
                split = &XEXP (src2, 0);
4285
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
4286
                       && XEXP (XEXP (src2, 0), 0) == src1)
4287
                split = &XEXP (XEXP (src2, 0), 0);
4288
 
4289
              if (split)
4290
                {
4291
                  SUBST (XEXP (x, 0), src2);
4292
                  return split;
4293
                }
4294
            }
4295
 
4296
          /* If that didn't work, perhaps the first operand is complex and
4297
             needs to be computed separately, so make a split point there.
4298
             This will occur on machines that just support REG + CONST
4299
             and have a constant moved through some previous computation.  */
4300
 
4301
          else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
4302
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4303
                         && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4304
            return &XEXP (XEXP (x, 0), 0);
4305
        }
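      /* Editorial sketch, assuming a target whose addresses accept only
         small offsets: combine_split_insns might split
         (set reg (plus R (const_int 0x12345678))) into
         (set reg (const_int 0x12340000)) followed by
         (set reg (plus reg (const_int 0x5678))); after replace_rtx the
         placeholder-free src2 is the new address, and the high constant
         within it is the split point.  */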
4306
 
4307
      /* If we have a PLUS whose first operand is complex, try computing it
4308
         separately by making a split there.  */
4309
      if (GET_CODE (XEXP (x, 0)) == PLUS
4310
          && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4311
                                            MEM_ADDR_SPACE (x))
4312
          && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
4313
          && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4314
                && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4315
        return &XEXP (XEXP (x, 0), 0);
4316
      break;
4317
 
4318
    case SET:
4319
#ifdef HAVE_cc0
4320
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
4321
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
4322
         we need to put the operand into a register.  So split at that
4323
         point.  */
4324
 
4325
      if (SET_DEST (x) == cc0_rtx
4326
          && GET_CODE (SET_SRC (x)) != COMPARE
4327
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
4328
          && !OBJECT_P (SET_SRC (x))
4329
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
4330
                && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
4331
        return &SET_SRC (x);
4332
#endif
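      /* Editorial illustration: with cc0, something like
         (set (cc0) (plus A B)) rarely matches as-is; splitting at SET_SRC
         lets the PLUS be computed into a register first, leaving the
         recognizable comparison (set (cc0) reg).  */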
4333
 
4334
      /* See if we can split SET_SRC as it stands.  */
4335
      split = find_split_point (&SET_SRC (x), insn);
4336
      if (split && split != &SET_SRC (x))
4337
        return split;
4338
 
4339
      /* See if we can split SET_DEST as it stands.  */
4340
      split = find_split_point (&SET_DEST (x), insn);
4341
      if (split && split != &SET_DEST (x))
4342
        return split;
4343
 
4344
      /* See if this is a bitfield assignment with everything constant.  If
4345
         so, this is an IOR of an AND, so split it into that.  */
4346
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4347
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
4348
              <= HOST_BITS_PER_WIDE_INT)
4349
          && CONST_INT_P (XEXP (SET_DEST (x), 1))
4350
          && CONST_INT_P (XEXP (SET_DEST (x), 2))
4351
          && CONST_INT_P (SET_SRC (x))
4352
          && ((INTVAL (XEXP (SET_DEST (x), 1))
4353
               + INTVAL (XEXP (SET_DEST (x), 2)))
4354
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
4355
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
4356
        {
4357
          HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
4358
          unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
4359
          unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
4360
          rtx dest = XEXP (SET_DEST (x), 0);
4361
          enum machine_mode mode = GET_MODE (dest);
4362
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
4363
          rtx or_mask;
4364
 
4365
          if (BITS_BIG_ENDIAN)
4366
            pos = GET_MODE_BITSIZE (mode) - len - pos;
4367
 
4368
          or_mask = gen_int_mode (src << pos, mode);
4369
          if (src == mask)
4370
            SUBST (SET_SRC (x),
4371
                   simplify_gen_binary (IOR, mode, dest, or_mask));
4372
          else
4373
            {
4374
              rtx negmask = gen_int_mode (~(mask << pos), mode);
4375
              SUBST (SET_SRC (x),
4376
                     simplify_gen_binary (IOR, mode,
4377
                                          simplify_gen_binary (AND, mode,
4378
                                                               dest, negmask),
4379
                                          or_mask));
4380
            }
4381
 
4382
          SUBST (SET_DEST (x), dest);
4383
 
4384
          split = find_split_point (&SET_SRC (x), insn);
4385
          if (split && split != &SET_SRC (x))
4386
            return split;
4387
        }
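      /* Editorial worked example, assuming !BITS_BIG_ENDIAN: for
         (set (zero_extract:SI D (const_int 4) (const_int 8)) (const_int 5)),
         len = 4, pos = 8 and mask = 15, so SET_SRC becomes
         (ior (and D (const_int -3841)) (const_int 1280)), i.e. clear
         bits 8..11 with ~(15 << 8) and OR in 5 << 8.  */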
4388
 
4389
      /* Otherwise, see if this is an operation that we can split into two.
4390
         If so, try to split that.  */
4391
      code = GET_CODE (SET_SRC (x));
4392
 
4393
      switch (code)
4394
        {
4395
        case AND:
4396
          /* If we are AND'ing with a large constant that is only a single
4397
             bit and the result is only being used in a context where we
4398
             need to know if it is zero or nonzero, replace it with a bit
4399
             extraction.  This will avoid the large constant, which might
4400
             have taken more than one insn to make.  If the constant were
4401
             not a valid argument to the AND but took only one insn to make,
4402
             this is no worse, but if it took more than one insn, it will
4403
             be better.  */
4404
 
4405
          if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4406
              && REG_P (XEXP (SET_SRC (x), 0))
4407
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
4408
              && REG_P (SET_DEST (x))
4409
              && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
4410
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
4411
              && XEXP (*split, 0) == SET_DEST (x)
4412
              && XEXP (*split, 1) == const0_rtx)
4413
            {
4414
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
4415
                                                XEXP (SET_SRC (x), 0),
4416
                                                pos, NULL_RTX, 1, 1, 0, 0);
4417
              if (extraction != 0)
4418
                {
4419
                  SUBST (SET_SRC (x), extraction);
4420
                  return find_split_point (loc, insn);
4421
                }
4422
            }
4423
          break;
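          /* Editorial worked example: for (set D (and X (const_int 4096)))
             where D is used only in (ne D (const_int 0)), 4096 == 1 << 12,
             so the AND is replaced by a one-bit extraction equivalent to
             (zero_extract X (const_int 1) (const_int 12)).  */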
4424
 
4425
        case NE:
4426
          /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
4427
             is known to be on, this can be converted into a NEG of a shift.  */
4428
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
4429
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4430
              && 1 <= (pos = exact_log2
4431
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
4432
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
4433
            {
4434
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
4435
 
4436
              SUBST (SET_SRC (x),
4437
                     gen_rtx_NEG (mode,
4438
                                  gen_rtx_LSHIFTRT (mode,
4439
                                                    XEXP (SET_SRC (x), 0),
4440
                                                    GEN_INT (pos))));
4441
 
4442
              split = find_split_point (&SET_SRC (x), insn);
4443
              if (split && split != &SET_SRC (x))
4444
                return split;
4445
            }
4446
          break;
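          /* Editorial worked example: with STORE_FLAG_VALUE == -1, if only
             bit 3 of X can be nonzero, (ne X (const_int 0)) becomes
             (neg (lshiftrt X (const_int 3))); the shift yields 0 or 1 and
             the negation turns that into 0 or -1.  */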
4447
 
4448
        case SIGN_EXTEND:
4449
          inner = XEXP (SET_SRC (x), 0);
4450
 
4451
          /* We can't optimize if either mode is a partial integer
4452
             mode as we don't know how many bits are significant
4453
             in those modes.  */
4454
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
4455
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
4456
            break;
4457
 
4458
          pos = 0;
4459
          len = GET_MODE_BITSIZE (GET_MODE (inner));
4460
          unsignedp = 0;
4461
          break;
4462
 
4463
        case SIGN_EXTRACT:
4464
        case ZERO_EXTRACT:
4465
          if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4466
              && CONST_INT_P (XEXP (SET_SRC (x), 2)))
4467
            {
4468
              inner = XEXP (SET_SRC (x), 0);
4469
              len = INTVAL (XEXP (SET_SRC (x), 1));
4470
              pos = INTVAL (XEXP (SET_SRC (x), 2));
4471
 
4472
              if (BITS_BIG_ENDIAN)
4473
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
4474
              unsignedp = (code == ZERO_EXTRACT);
4475
            }
4476
          break;
4477
 
4478
        default:
4479
          break;
4480
        }
4481
 
4482
      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
4483
        {
4484
          enum machine_mode mode = GET_MODE (SET_SRC (x));
4485
 
4486
          /* For unsigned, we have a choice of a shift followed by an
4487
             AND or two shifts.  Use two shifts for field sizes where the
4488
             constant might be too large.  We assume here that we can
4489
             always at least get 8-bit constants in an AND insn, which is
4490
             true for every current RISC.  */
4491
 
4492
          if (unsignedp && len <= 8)
4493
            {
4494
              SUBST (SET_SRC (x),
4495
                     gen_rtx_AND (mode,
4496
                                  gen_rtx_LSHIFTRT
4497
                                  (mode, gen_lowpart (mode, inner),
4498
                                   GEN_INT (pos)),
4499
                                  GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
4500
 
4501
              split = find_split_point (&SET_SRC (x), insn);
4502
              if (split && split != &SET_SRC (x))
4503
                return split;
4504
            }
4505
          else
4506
            {
4507
              SUBST (SET_SRC (x),
4508
                     gen_rtx_fmt_ee
4509
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
4510
                      gen_rtx_ASHIFT (mode,
4511
                                      gen_lowpart (mode, inner),
4512
                                      GEN_INT (GET_MODE_BITSIZE (mode)
4513
                                               - len - pos)),
4514
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));
4515
 
4516
              split = find_split_point (&SET_SRC (x), insn);
4517
              if (split && split != &SET_SRC (x))
4518
                return split;
4519
            }
4520
        }
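      /* Editorial worked example, assuming SImode and !BITS_BIG_ENDIAN:
         an unsigned 8-bit field at bit 4 becomes
         (and (lshiftrt inner (const_int 4)) (const_int 255)), while a
         signed one becomes
         (ashiftrt (ashift inner (const_int 20)) (const_int 24)).  */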
4521
 
4522
      /* See if this is a simple operation with a constant as the second
4523
         operand.  It might be that this constant is out of range and hence
4524
         could be used as a split point.  */
4525
      if (BINARY_P (SET_SRC (x))
4526
          && CONSTANT_P (XEXP (SET_SRC (x), 1))
4527
          && (OBJECT_P (XEXP (SET_SRC (x), 0))
4528
              || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
4529
                  && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
4530
        return &XEXP (SET_SRC (x), 1);
4531
 
4532
      /* Finally, see if this is a simple operation with its first operand
4533
         not in a register.  The operation might require this operand in a
4534
         register, so return it as a split point.  We can always do this
4535
         because if the first operand were another operation, we would have
4536
         already found it as a split point.  */
4537
      if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
4538
          && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
4539
        return &XEXP (SET_SRC (x), 0);
4540
 
4541
      return 0;
4542
 
4543
    case AND:
4544
    case IOR:
4545
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
4546
         it is better to write this as (not (ior A B)) so we can split it.
4547
         Similarly for IOR.  */
4548
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
4549
        {
4550
          SUBST (*loc,
4551
                 gen_rtx_NOT (GET_MODE (x),
4552
                              gen_rtx_fmt_ee (code == IOR ? AND : IOR,
4553
                                              GET_MODE (x),
4554
                                              XEXP (XEXP (x, 0), 0),
4555
                                              XEXP (XEXP (x, 1), 0))));
4556
          return find_split_point (loc, insn);
4557
        }
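      /* Editorial illustration: by De Morgan's law,
         (and (not A) (not B)) becomes (not (ior A B)) and
         (ior (not A) (not B)) becomes (not (and A B)); the inner IOR or
         AND is then a natural split point.  */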
4558
 
4559
      /* Many RISC machines have a large set of logical insns.  If the
4560
         second operand is a NOT, put it first so we will try to split the
4561
         other operand first.  */
4562
      if (GET_CODE (XEXP (x, 1)) == NOT)
4563
        {
4564
          rtx tem = XEXP (x, 0);
4565
          SUBST (XEXP (x, 0), XEXP (x, 1));
4566
          SUBST (XEXP (x, 1), tem);
4567
        }
4568
      break;
4569
 
4570
    default:
4571
      break;
4572
    }
4573
 
4574
  /* Otherwise, select our actions depending on our rtx class.  */
4575
  switch (GET_RTX_CLASS (code))
4576
    {
4577
    case RTX_BITFIELD_OPS:              /* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
4578
    case RTX_TERNARY:
4579
      split = find_split_point (&XEXP (x, 2), insn);
4580
      if (split)
4581
        return split;
4582
      /* ... fall through ...  */
4583
    case RTX_BIN_ARITH:
4584
    case RTX_COMM_ARITH:
4585
    case RTX_COMPARE:
4586
    case RTX_COMM_COMPARE:
4587
      split = find_split_point (&XEXP (x, 1), insn);
4588
      if (split)
4589
        return split;
4590
      /* ... fall through ...  */
4591
    case RTX_UNARY:
4592
      /* Some machines have (and (shift ...) ...) insns.  If X is not
4593
         an AND, but XEXP (X, 0) is, use it as our split point.  */
4594
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
4595
        return &XEXP (x, 0);
4596
 
4597
      split = find_split_point (&XEXP (x, 0), insn);
4598
      if (split)
4599
        return split;
4600
      return loc;
4601
 
4602
    default:
4603
      /* Otherwise, we don't have a split point.  */
4604
      return 0;
4605
    }
4606
}
4607
 
4608
/* Throughout X, replace FROM with TO, and return the result.
4609
   The result is TO if X is FROM;
4610
   otherwise the result is X, but its contents may have been modified.
4611
   If they were modified, a record was made in undobuf so that
4612
   undo_all will (among other things) return X to its original state.
4613
 
4614
   If the number of changes necessary is too great to record for undoing,
4615
   the excess changes are not made, so the result is invalid.
4616
   The changes already made can still be undone.
4617
   undobuf.num_undo is incremented for such changes, so by testing that
4618
   the caller can tell whether the result is valid.
4619
 
4620
   `n_occurrences' is incremented each time FROM is replaced.
4621
 
4622
   IN_DEST is nonzero if we are processing the SET_DEST of a SET.
4623
 
4624
   UNIQUE_COPY is nonzero if each substitution must be unique.  We do this
4625
   by copying if `n_occurrences' is nonzero.  */
4626
 
4627
static rtx
4628
subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
4629
{
4630
  enum rtx_code code = GET_CODE (x);
4631
  enum machine_mode op0_mode = VOIDmode;
4632
  const char *fmt;
4633
  int len, i;
4634
  rtx new_rtx;
4635
 
4636
/* Two expressions are equal if they are identical copies of a shared
4637
   RTX or if they are both registers with the same register number
4638
   and mode.  */
4639
 
4640
#define COMBINE_RTX_EQUAL_P(X,Y)                        \
4641
  ((X) == (Y)                                           \
4642
   || (REG_P (X) && REG_P (Y)   \
4643
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
4644
 
4645
  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
4646
    {
4647
      n_occurrences++;
4648
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
4649
    }
4650
 
4651
  /* If X and FROM are the same register but different modes, they
4652
     will not have been seen as equal above.  However, the log links code
4653
     will make a LOG_LINKS entry for that case.  If we do nothing, we
4654
     will try to rerecognize our original insn and, when it succeeds,
4655
     we will delete the feeding insn, which is incorrect.
4656
 
4657
     So force this insn not to match in this (rare) case.  */
4658
  if (! in_dest && code == REG && REG_P (from)
4659
      && reg_overlap_mentioned_p (x, from))
4660
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
4661
 
4662
  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
4663
     of which may contain things that can be combined.  */
4664
  if (code != MEM && code != LO_SUM && OBJECT_P (x))
4665
    return x;
4666
 
4667
  /* It is possible to have a subexpression appear twice in the insn.
4668
     Suppose that FROM is a register that appears within TO.
4669
     Then, after that subexpression has been scanned once by `subst',
4670
     the second time it is scanned, TO may be found.  If we were
4671
     to scan TO here, we would find FROM within it and create a
4672
     self-referential rtl structure, which is completely wrong.  */
4673
  if (COMBINE_RTX_EQUAL_P (x, to))
4674
    return to;
4675
 
4676
  /* Parallel asm_operands need special attention because all of the
4677
     inputs are shared across the arms.  Furthermore, unsharing the
4678
     rtl results in recognition failures.  Failure to handle this case
4679
     specially can result in circular rtl.
4680
 
4681
     Solve this by doing a normal pass across the first entry of the
4682
     parallel, and only processing the SET_DESTs of the subsequent
4683
     entries.  Ug.  */
4684
 
4685
  if (code == PARALLEL
4686
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
4687
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
4688
    {
4689
      new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
4690
 
4691
      /* If this substitution failed, this whole thing fails.  */
4692
      if (GET_CODE (new_rtx) == CLOBBER
4693
          && XEXP (new_rtx, 0) == const0_rtx)
4694
        return new_rtx;
4695
 
4696
      SUBST (XVECEXP (x, 0, 0), new_rtx);
4697
 
4698
      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
4699
        {
4700
          rtx dest = SET_DEST (XVECEXP (x, 0, i));
4701
 
4702
          if (!REG_P (dest)
4703
              && GET_CODE (dest) != CC0
4704
              && GET_CODE (dest) != PC)
4705
            {
4706
              new_rtx = subst (dest, from, to, 0, unique_copy);
4707
 
4708
              /* If this substitution failed, this whole thing fails.  */
4709
              if (GET_CODE (new_rtx) == CLOBBER
4710
                  && XEXP (new_rtx, 0) == const0_rtx)
4711
                return new_rtx;
4712
 
4713
              SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
4714
            }
4715
        }
4716
    }
4717
  else
4718
    {
4719
      len = GET_RTX_LENGTH (code);
4720
      fmt = GET_RTX_FORMAT (code);
4721
 
4722
      /* We don't need to process a SET_DEST that is a register, CC0,
4723
         or PC, so set up to skip this common case.  All other cases
4724
         where we want to suppress replacing something inside a
4725
         SET_SRC are handled via the IN_DEST operand.  */
4726
      if (code == SET
4727
          && (REG_P (SET_DEST (x))
4728
              || GET_CODE (SET_DEST (x)) == CC0
4729
              || GET_CODE (SET_DEST (x)) == PC))
4730
        fmt = "ie";
4731
 
4732
      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
4733
         constant.  */
4734
      if (fmt[0] == 'e')
4735
        op0_mode = GET_MODE (XEXP (x, 0));
4736
 
4737
      for (i = 0; i < len; i++)
4738
        {
4739
          if (fmt[i] == 'E')
4740
            {
4741
              int j;
4742
              for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4743
                {
4744
                  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
4745
                    {
4746
                      new_rtx = (unique_copy && n_occurrences
4747
                             ? copy_rtx (to) : to);
4748
                      n_occurrences++;
4749
                    }
4750
                  else
4751
                    {
4752
                      new_rtx = subst (XVECEXP (x, i, j), from, to, 0,
4753
                                   unique_copy);
4754
 
4755
                      /* If this substitution failed, this whole thing
4756
                         fails.  */
4757
                      if (GET_CODE (new_rtx) == CLOBBER
4758
                          && XEXP (new_rtx, 0) == const0_rtx)
4759
                        return new_rtx;
4760
                    }
4761
 
4762
                  SUBST (XVECEXP (x, i, j), new_rtx);
4763
                }
4764
            }
4765
          else if (fmt[i] == 'e')
4766
            {
4767
              /* If this is a register being set, ignore it.  */
4768
              new_rtx = XEXP (x, i);
4769
              if (in_dest
4770
                  && i == 0
4771
                  && (((code == SUBREG || code == ZERO_EXTRACT)
4772
                       && REG_P (new_rtx))
4773
                      || code == STRICT_LOW_PART))
4774
                ;
4775
 
4776
              else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
4777
                {
4778
                  /* In general, don't install a subreg involving two
4779
                     modes not tieable.  It can worsen register
4780
                     allocation, and can even make invalid reload
4781
                     insns, since the reg inside may need to be copied
4782
                     from in the outside mode, and that may be invalid
4783
                     if it is an fp reg copied in integer mode.
4784
 
4785
                     We allow two exceptions to this: It is valid if
4786
                     it is inside another SUBREG and the mode of that
4787
                     SUBREG and the mode of the inside of TO is
4788
                     tieable and it is valid if X is a SET that copies
4789
                     FROM to CC0.  */
4790
 
4791
                  if (GET_CODE (to) == SUBREG
4792
                      && ! MODES_TIEABLE_P (GET_MODE (to),
4793
                                            GET_MODE (SUBREG_REG (to)))
4794
                      && ! (code == SUBREG
4795
                            && MODES_TIEABLE_P (GET_MODE (x),
4796
                                                GET_MODE (SUBREG_REG (to))))
4797
#ifdef HAVE_cc0
4798
                      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
4799
#endif
4800
                      )
4801
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4802
 
4803
#ifdef CANNOT_CHANGE_MODE_CLASS
4804
                  if (code == SUBREG
4805
                      && REG_P (to)
4806
                      && REGNO (to) < FIRST_PSEUDO_REGISTER
4807
                      && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
4808
                                                   GET_MODE (to),
4809
                                                   GET_MODE (x)))
4810
                    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4811
#endif
4812
 
4813
                  new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
4814
                  n_occurrences++;
4815
                }
4816
              else
4817
                /* If we are in a SET_DEST, suppress most cases unless we
4818
                   have gone inside a MEM, in which case we want to
4819
                   simplify the address.  We assume here that things that
4820
                   are actually part of the destination have their inner
4821
                   parts in the first expression.  This is true for SUBREG,
4822
                   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4823
                   things aside from REG and MEM that should appear in a
4824
                   SET_DEST.  */
4825
                new_rtx = subst (XEXP (x, i), from, to,
4826
                             (((in_dest
4827
                                && (code == SUBREG || code == STRICT_LOW_PART
4828
                                    || code == ZERO_EXTRACT))
4829
                               || code == SET)
4830
                              && i == 0), unique_copy);
4831
 
4832
              /* If we found that we will have to reject this combination,
4833
                 indicate that by returning the CLOBBER ourselves, rather than
4834
                 an expression containing it.  This will speed things up as
4835
                 well as prevent accidents where two CLOBBERs are considered
4836
                 to be equal, thus producing an incorrect simplification.  */
4837
 
4838
              if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
4839
                return new_rtx;
4840
 
4841
              if (GET_CODE (x) == SUBREG
4842
                  && (CONST_INT_P (new_rtx)
4843
                      || GET_CODE (new_rtx) == CONST_DOUBLE))
4844
                {
4845
                  enum machine_mode mode = GET_MODE (x);
4846
 
4847
                  x = simplify_subreg (GET_MODE (x), new_rtx,
4848
                                       GET_MODE (SUBREG_REG (x)),
4849
                                       SUBREG_BYTE (x));
4850
                  if (! x)
4851
                    x = gen_rtx_CLOBBER (mode, const0_rtx);
4852
                }
4853
              else if (CONST_INT_P (new_rtx)
4854
                       && GET_CODE (x) == ZERO_EXTEND)
4855
                {
4856
                  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
4857
                                                new_rtx, GET_MODE (XEXP (x, 0)));
4858
                  gcc_assert (x);
4859
                }
4860
              else
4861
                SUBST (XEXP (x, i), new_rtx);
4862
            }
4863
        }
4864
    }
4865
 
4866
  /* Check if we are loading something from the constant pool via float
4867
     extension; in this case we would undo compress_float_constant
4868
     optimization and degrade the constant load to an immediate value.  */
4869
  if (GET_CODE (x) == FLOAT_EXTEND
4870
      && MEM_P (XEXP (x, 0))
4871
      && MEM_READONLY_P (XEXP (x, 0)))
4872
    {
4873
      rtx tmp = avoid_constant_pool_reference (x);
4874
      if (x != tmp)
4875
        return x;
4876
    }
4877
 
4878
  /* Try to simplify X.  If the simplification changed the code, it is likely
4879
     that further simplification will help, so loop, but limit the number
4880
     of repetitions that will be performed.  */
4881
 
4882
  for (i = 0; i < 4; i++)
4883
    {
4884
      /* If X is sufficiently simple, don't bother trying to do anything
4885
         with it.  */
4886
      if (code != CONST_INT && code != REG && code != CLOBBER)
4887
        x = combine_simplify_rtx (x, op0_mode, in_dest);
4888
 
4889
      if (GET_CODE (x) == code)
4890
        break;
4891
 
4892
      code = GET_CODE (x);
4893
 
4894
      /* We no longer know the original mode of operand 0 since we
4895
         have changed the form of X.  */
4896
      op0_mode = VOIDmode;
4897
    }
4898
 
4899
  return x;
4900
}
4901
 
4902
/* Simplify X, a piece of RTL.  We just operate on the expression at the
4903
   outer level; call `subst' to simplify recursively.  Return the new
4904
   expression.
4905
 
4906
   OP0_MODE is the original mode of XEXP (x, 0).  IN_DEST is nonzero
4907
   if we are inside a SET_DEST.  */
4908
 
4909
static rtx
4910
combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
4911
{
4912
  enum rtx_code code = GET_CODE (x);
4913
  enum machine_mode mode = GET_MODE (x);
4914
  rtx temp;
4915
  int i;
4916
 
4917
  /* If this is a commutative operation, put a constant last and a complex
4918
     expression first.  We don't need to do this for comparisons here.  */
4919
  if (COMMUTATIVE_ARITH_P (x)
4920
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
4921
    {
4922
      temp = XEXP (x, 0);
4923
      SUBST (XEXP (x, 0), XEXP (x, 1));
4924
      SUBST (XEXP (x, 1), temp);
4925
    }
4926
 
4927
  /* If this is a simple operation applied to an IF_THEN_ELSE, try
4928
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
4929
     things.  Check for cases where both arms are testing the same
4930
     condition.
4931
 
4932
     Don't do anything if all operands are very simple.  */
4933
 
4934
  if ((BINARY_P (x)
4935
       && ((!OBJECT_P (XEXP (x, 0))
4936
            && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4937
                  && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
4938
           || (!OBJECT_P (XEXP (x, 1))
4939
               && ! (GET_CODE (XEXP (x, 1)) == SUBREG
4940
                     && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
4941
      || (UNARY_P (x)
4942
          && (!OBJECT_P (XEXP (x, 0))
4943
               && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4944
                     && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
4945
    {
4946
      rtx cond, true_rtx, false_rtx;
4947
 
4948
      cond = if_then_else_cond (x, &true_rtx, &false_rtx);
4949
      if (cond != 0
4950
          /* If everything is a comparison, what we have is highly unlikely
4951
             to be simpler, so don't use it.  */
4952
          && ! (COMPARISON_P (x)
4953
                && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
4954
        {
4955
          rtx cop1 = const0_rtx;
4956
          enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
4957
 
4958
          if (cond_code == NE && COMPARISON_P (cond))
4959
            return x;
4960
 
4961
          /* Simplify the alternative arms; this may collapse the true and
4962
             false arms to store-flag values.  Be careful to use copy_rtx
4963
             here since true_rtx or false_rtx might share RTL with x as a
4964
             result of the if_then_else_cond call above.  */
4965
          true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
4966
          false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
4967
 
4968
          /* If true_rtx and false_rtx are not general_operands, an if_then_else
4969
             is unlikely to be simpler.  */
4970
          if (general_operand (true_rtx, VOIDmode)
4971
              && general_operand (false_rtx, VOIDmode))
4972
            {
4973
              enum rtx_code reversed;
4974
 
4975
              /* Restarting if we generate a store-flag expression will cause
4976
                 us to loop.  Just drop through in this case.  */
4977
 
4978
              /* If the result values are STORE_FLAG_VALUE and zero, we can
4979
                 just make the comparison operation.  */
4980
              if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
4981
                x = simplify_gen_relational (cond_code, mode, VOIDmode,
4982
                                             cond, cop1);
4983
              else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
4984
                       && ((reversed = reversed_comparison_code_parts
4985
                                        (cond_code, cond, cop1, NULL))
4986
                           != UNKNOWN))
4987
                x = simplify_gen_relational (reversed, mode, VOIDmode,
4988
                                             cond, cop1);
4989
 
4990
              /* Likewise, we can make the negate of a comparison operation
4991
                 if the result values are - STORE_FLAG_VALUE and zero.  */
4992
              else if (CONST_INT_P (true_rtx)
4993
                       && INTVAL (true_rtx) == - STORE_FLAG_VALUE
4994
                       && false_rtx == const0_rtx)
4995
                x = simplify_gen_unary (NEG, mode,
4996
                                        simplify_gen_relational (cond_code,
4997
                                                                 mode, VOIDmode,
4998
                                                                 cond, cop1),
4999
                                        mode);
5000
              else if (CONST_INT_P (false_rtx)
5001
                       && INTVAL (false_rtx) == - STORE_FLAG_VALUE
5002
                       && true_rtx == const0_rtx
5003
                       && ((reversed = reversed_comparison_code_parts
5004
                                        (cond_code, cond, cop1, NULL))
5005
                           != UNKNOWN))
5006
                x = simplify_gen_unary (NEG, mode,
5007
                                        simplify_gen_relational (reversed,
5008
                                                                 mode, VOIDmode,
5009
                                                                 cond, cop1),
5010
                                        mode);
5011
              else
5012
                return gen_rtx_IF_THEN_ELSE (mode,
5013
                                             simplify_gen_relational (cond_code,
5014
                                                                      mode,
5015
                                                                      VOIDmode,
5016
                                                                      cond,
5017
                                                                      cop1),
5018
                                             true_rtx, false_rtx);
5019
 
5020
              code = GET_CODE (x);
5021
              op0_mode = VOIDmode;
5022
            }
5023
        }
5024
    }
5025
 
5026
  /* Try to fold this expression in case we have constants that weren't
5027
     present before.  */
5028
  temp = 0;
5029
  switch (GET_RTX_CLASS (code))
5030
    {
5031
    case RTX_UNARY:
5032
      if (op0_mode == VOIDmode)
5033
        op0_mode = GET_MODE (XEXP (x, 0));
5034
      temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
5035
      break;
5036
    case RTX_COMPARE:
5037
    case RTX_COMM_COMPARE:
5038
      {
5039
        enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
5040
        if (cmp_mode == VOIDmode)
5041
          {
5042
            cmp_mode = GET_MODE (XEXP (x, 1));
5043
            if (cmp_mode == VOIDmode)
5044
              cmp_mode = op0_mode;
5045
          }
5046
        temp = simplify_relational_operation (code, mode, cmp_mode,
5047
                                              XEXP (x, 0), XEXP (x, 1));
5048
      }
5049
      break;
5050
    case RTX_COMM_ARITH:
5051
    case RTX_BIN_ARITH:
5052
      temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
5053
      break;
5054
    case RTX_BITFIELD_OPS:
5055
    case RTX_TERNARY:
5056
      temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5057
                                         XEXP (x, 1), XEXP (x, 2));
5058
      break;
5059
    default:
5060
      break;
5061
    }
5062
 
5063
  if (temp)
5064
    {
5065
      x = temp;
5066
      code = GET_CODE (temp);
5067
      op0_mode = VOIDmode;
5068
      mode = GET_MODE (temp);
5069
    }
5070
 
5071
  /* First see if we can apply the inverse distributive law.  */
5072
  if (code == PLUS || code == MINUS
5073
      || code == AND || code == IOR || code == XOR)
5074
    {
5075
      x = apply_distributive_law (x);
5076
      code = GET_CODE (x);
5077
      op0_mode = VOIDmode;
5078
    }
5079
 
5080
  /* If CODE is an associative operation not otherwise handled, see if we
5081
     can associate some operands.  This can win if they are constants or
5082
     if they are logically related (i.e. (a & b) & a).  */
5083
  if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5084
       || code == AND || code == IOR || code == XOR
5085
       || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
5086
      && ((INTEGRAL_MODE_P (mode) && code != DIV)
5087
          || (flag_associative_math && FLOAT_MODE_P (mode))))
5088
    {
5089
      if (GET_CODE (XEXP (x, 0)) == code)
5090
        {
5091
          rtx other = XEXP (XEXP (x, 0), 0);
5092
          rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5093
          rtx inner_op1 = XEXP (x, 1);
5094
          rtx inner;
5095
 
5096
          /* Make sure we pass the constant operand if any as the second
5097
             one if this is a commutative operation.  */
5098
          if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
5099
            {
5100
              rtx tem = inner_op0;
5101
              inner_op0 = inner_op1;
5102
              inner_op1 = tem;
5103
            }
5104
          inner = simplify_binary_operation (code == MINUS ? PLUS
5105
                                             : code == DIV ? MULT
5106
                                             : code,
5107
                                             mode, inner_op0, inner_op1);
5108
 
5109
          /* For commutative operations, try the other pair if that one
5110
             didn't simplify.  */
5111
          if (inner == 0 && COMMUTATIVE_ARITH_P (x))
5112
            {
5113
              other = XEXP (XEXP (x, 0), 1);
5114
              inner = simplify_binary_operation (code, mode,
5115
                                                 XEXP (XEXP (x, 0), 0),
5116
                                                 XEXP (x, 1));
5117
            }
5118
 
5119
          if (inner)
5120
            return simplify_gen_binary (code, mode, other, inner);
5121
        }
5122
    }
5123
 
5124
  /* A little bit of algebraic simplification here.  */
5125
  switch (code)
5126
    {
5127
    case MEM:
5128
      /* Ensure that our address has any ASHIFTs converted to MULT in case
5129
         address-recognizing predicates are called later.  */
5130
      temp = make_compound_operation (XEXP (x, 0), MEM);
5131
      SUBST (XEXP (x, 0), temp);
5132
      break;
5133
 
5134
    case SUBREG:
5135
      if (op0_mode == VOIDmode)
5136
        op0_mode = GET_MODE (SUBREG_REG (x));
5137
 
5138
      /* See if this can be moved to simplify_subreg.  */
5139
      if (CONSTANT_P (SUBREG_REG (x))
5140
          && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
5141
             /* Don't call gen_lowpart if the inner mode
5142
                is VOIDmode and we cannot simplify it, as SUBREG without
5143
                inner mode is invalid.  */
5144
          && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5145
              || gen_lowpart_common (mode, SUBREG_REG (x))))
5146
        return gen_lowpart (mode, SUBREG_REG (x));
5147
 
5148
      if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
5149
        break;
5150
      {
5151
        rtx temp;
5152
        temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
5153
                                SUBREG_BYTE (x));
5154
        if (temp)
5155
          return temp;
5156
      }
5157
 
5158
      /* Don't change the mode of the MEM if that would change the meaning
5159
         of the address.  */
5160
      if (MEM_P (SUBREG_REG (x))
5161
          && (MEM_VOLATILE_P (SUBREG_REG (x))
5162
              || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
5163
        return gen_rtx_CLOBBER (mode, const0_rtx);
5164
 
5165
      /* Note that we cannot do any narrowing for non-constants since
5166
         we might have been counting on using the fact that some bits were
5167
         zero.  We now do this in the SET.  */
5168
 
5169
      break;
5170
 
5171
    case NEG:
5172
      temp = expand_compound_operation (XEXP (x, 0));
5173
 
5174
      /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
5175
         replaced by (lshiftrt X C).  This will convert
5176
         (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y).  */
5177
 
5178
      if (GET_CODE (temp) == ASHIFTRT
5179
          && CONST_INT_P (XEXP (temp, 1))
5180
          && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
5181
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
5182
                                     INTVAL (XEXP (temp, 1)));
5183
 
5184
      /* If X has only a single bit that might be nonzero, say, bit I, convert
5185
         (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
5186
         MODE minus 1.  This will convert (neg (zero_extract X 1 Y)) to
5187
         (sign_extract X 1 Y).  But only do this if TEMP isn't a register
5188
         or a SUBREG of one since we'd be making the expression more
5189
         complex if it was just a register.  */
5190
 
5191
      if (!REG_P (temp)
5192
          && ! (GET_CODE (temp) == SUBREG
5193
                && REG_P (SUBREG_REG (temp)))
5194
          && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
5195
        {
5196
          rtx temp1 = simplify_shift_const
5197
            (NULL_RTX, ASHIFTRT, mode,
5198
             simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
5199
                                   GET_MODE_BITSIZE (mode) - 1 - i),
5200
             GET_MODE_BITSIZE (mode) - 1 - i);
5201
 
5202
          /* If all we did was surround TEMP with the two shifts, we
5203
             haven't improved anything, so don't use it.  Otherwise,
5204
             we are better off with TEMP1.  */
5205
          if (GET_CODE (temp1) != ASHIFTRT
5206
              || GET_CODE (XEXP (temp1, 0)) != ASHIFT
5207
              || XEXP (XEXP (temp1, 0), 0) != temp)
5208
            return temp1;
5209
        }
5210
      break;
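      /* Editorial worked example, SImode: if only bit 0 of TEMP can be
         nonzero, i == 0 and (neg TEMP) becomes
         (ashiftrt (ashift TEMP (const_int 31)) (const_int 31)),
         mapping 0 to 0 and 1 to -1.  */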
5211
 
5212
    case TRUNCATE:
5213
      /* We can't handle truncation to a partial integer mode here
5214
         because we don't know the real bitsize of the partial
5215
         integer mode.  */
5216
      if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
5217
        break;
5218
 
5219
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5220
        SUBST (XEXP (x, 0),
5221
               force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
5222
                              GET_MODE_MASK (mode), 0));
5223
 
5224
      /* We can truncate a constant value and return it.  */
5225
      if (CONST_INT_P (XEXP (x, 0)))
5226
        return gen_int_mode (INTVAL (XEXP (x, 0)), mode);
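      /* Editorial illustration: (truncate:QI (const_int 0x1234)) folds
         to (const_int 0x34) here via gen_int_mode.  */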
5227
 
5228
      /* Similarly to what we do in simplify-rtx.c, a truncate of a register
5229
         whose value is a comparison can be replaced with a subreg if
5230
         STORE_FLAG_VALUE permits.  */
5231
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5232
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
5233
          && (temp = get_last_value (XEXP (x, 0)))
5234
          && COMPARISON_P (temp))
5235
        return gen_lowpart (mode, XEXP (x, 0));
5236
      break;
5237
 
5238
    case CONST:
5239
      /* (const (const X)) can become (const X).  Do it this way rather than
5240
         returning the inner CONST since CONST can be shared with a
5241
         REG_EQUAL note.  */
5242
      if (GET_CODE (XEXP (x, 0)) == CONST)
5243
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
5244
      break;
5245
 
5246
#ifdef HAVE_lo_sum
5247
    case LO_SUM:
5248
      /* Convert (lo_sum (high FOO) FOO) to FOO.  This is necessary so we
5249
         can add in an offset.  find_split_point will split this address up
5250
         again if it doesn't match.  */
5251
      if (GET_CODE (XEXP (x, 0)) == HIGH
5252
          && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
5253
        return XEXP (x, 1);
5254
      break;
5255
#endif
5256
 
5257
    case PLUS:
5258
      /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
5259
         when c is (const_int (pow2 + 1) / 2) is a sign extension of a
5260
         bit-field and can be replaced by either a sign_extend or a
5261
         sign_extract.  The `and' may be a zero_extend and the two
5262
         <c>, -<c> constants may be reversed.  */
5263
      if (GET_CODE (XEXP (x, 0)) == XOR
5264
          && CONST_INT_P (XEXP (x, 1))
5265
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
5266
          && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
5267
          && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5268
              || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5269
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5270
          && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
5271
               && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
5272
               && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5273
                   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
5274
              || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
5275
                  && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
5276
                      == (unsigned int) i + 1))))
5277
        return simplify_shift_const
5278
          (NULL_RTX, ASHIFTRT, mode,
5279
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
5280
                                 XEXP (XEXP (XEXP (x, 0), 0), 0),
5281
                                 GET_MODE_BITSIZE (mode) - (i + 1)),
5282
           GET_MODE_BITSIZE (mode) - (i + 1));
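      /* Editorial worked example, SImode: with mask 127 and c == 64
         (so i == 6), (plus (xor (and X (const_int 127)) (const_int 64))
         (const_int -64)) sign-extends the 7-bit field and becomes
         (ashiftrt (ashift X (const_int 25)) (const_int 25)).  */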
5283
 
5284
      /* If only the low-order bit of X is possibly nonzero, (plus x -1)
5285
         can become (ashiftrt (ashift (xor x 1) C) C) where C is
5286
         the bitsize of the mode - 1.  This allows simplification of
5287
         "a = (b & 8) == 0;"  */
5288
      if (XEXP (x, 1) == constm1_rtx
5289
          && !REG_P (XEXP (x, 0))
5290
          && ! (GET_CODE (XEXP (x, 0)) == SUBREG
5291
                && REG_P (SUBREG_REG (XEXP (x, 0))))
5292
          && nonzero_bits (XEXP (x, 0), mode) == 1)
5293
        return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
5294
           simplify_shift_const (NULL_RTX, ASHIFT, mode,
5295
                                 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
5296
                                 GET_MODE_BITSIZE (mode) - 1),
5297
           GET_MODE_BITSIZE (mode) - 1);
5298
 
5299
      /* If we are adding two things that have no bits in common, convert
5300
         the addition into an IOR.  This will often be further simplified,
5301
         for example in cases like ((a & 1) + (a & 2)), which can
5302
         become a & 3.  */
5303
 
5304
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5305
          && (nonzero_bits (XEXP (x, 0), mode)
5306
              & nonzero_bits (XEXP (x, 1), mode)) == 0)
5307
        {
5308
          /* Try to simplify the expression further.  */
5309
          rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
5310
          temp = combine_simplify_rtx (tor, mode, in_dest);
5311
 
5312
          /* If we could, great.  If not, do not go ahead with the IOR
5313
             replacement, since PLUS appears in many special purpose
5314
             address arithmetic instructions.  */
5315
          if (GET_CODE (temp) != CLOBBER && temp != tor)
5316
            return temp;
5317
        }
5318
      break;
5319
 
5320
    case MINUS:
5321
      /* (minus <foo> (and <foo> (const_int -pow2))) becomes
5322
         (and <foo> (const_int pow2-1))  */
5323
      if (GET_CODE (XEXP (x, 1)) == AND
5324
          && CONST_INT_P (XEXP (XEXP (x, 1), 1))
5325
          && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
5326
          && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
5327
        return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
5328
                                       -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
5329
      break;
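      /* Editorial worked example: (minus X (and X (const_int -8)))
         becomes (and X (const_int 7)); subtracting off everything above
         the low three bits leaves the remainder of X modulo 8.  */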
5330
 
5331
    case MULT:
5332
      /* If we have (mult (plus A B) C), apply the distributive law and then
5333
         the inverse distributive law to see if things simplify.  This
5334
         occurs mostly in addresses, often when unrolling loops.  */
5335
 
5336
      if (GET_CODE (XEXP (x, 0)) == PLUS)
5337
        {
5338
          rtx result = distribute_and_simplify_rtx (x, 0);
5339
          if (result)
5340
            return result;
5341
        }
5342
 
5343
      /* Try simplify a*(b/c) as (a*b)/c.  */
5344
      if (FLOAT_MODE_P (mode) && flag_associative_math
5345
          && GET_CODE (XEXP (x, 0)) == DIV)
5346
        {
5347
          rtx tem = simplify_binary_operation (MULT, mode,
5348
                                               XEXP (XEXP (x, 0), 0),
5349
                                               XEXP (x, 1));
5350
          if (tem)
5351
            return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
5352
        }
5353
      break;
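      /* Editorial illustration: with flag_associative_math, a float
         (mult (div (const_double 6.0) X) (const_double 2.0)) can fold
         the constant product and return (div (const_double 12.0) X).  */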
5354
 
5355
    case UDIV:
5356
      /* If this is a divide by a power of two, treat it as a shift if
5357
         its first operand is a shift.  */
      if (CONST_INT_P (XEXP (x, 1))
          && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
          && (GET_CODE (XEXP (x, 0)) == ASHIFT
              || GET_CODE (XEXP (x, 0)) == LSHIFTRT
              || GET_CODE (XEXP (x, 0)) == ASHIFTRT
              || GET_CODE (XEXP (x, 0)) == ROTATE
              || GET_CODE (XEXP (x, 0)) == ROTATERT))
        return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
      break;

    case EQ:  case NE:
    case GT:  case GTU:  case GE:  case GEU:
    case LT:  case LTU:  case LE:  case LEU:
    case UNEQ:  case LTGT:
    case UNGT:  case UNGE:
    case UNLT:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If the first operand is a condition code, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 0)) == COMPARE
          || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
              && ! CC0_P (XEXP (x, 0))))
        {
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          enum rtx_code new_code;

          if (GET_CODE (op0) == COMPARE)
            op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);

          /* Simplify our comparison, if possible.  */
          new_code = simplify_comparison (code, &op0, &op1);

          /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
             if only the low-order bit is possibly nonzero in X (such as when
             X is a ZERO_EXTRACT of one bit).  Similarly, we can convert EQ to
             (xor X 1) or (minus 1 X); we use the former.  Finally, if X is
             known to be either 0 or -1, NE becomes a NEG and EQ becomes
             (plus X 1).

             Remove any ZERO_EXTRACT we made when thinking this was a
             comparison.  It may now be simpler to use, e.g., an AND.  If a
             ZERO_EXTRACT is indeed appropriate, it will be placed back by
             the call to make_compound_operation in the SET case.  */

          if (STORE_FLAG_VALUE == 1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && nonzero_bits (op0, mode) == 1)
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == 1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_binary (XOR, mode,
                                          gen_lowpart (mode, op0),
                                          const1_rtx);
            }

          else if (STORE_FLAG_VALUE == 1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), 1);
            }

          /* If STORE_FLAG_VALUE is -1, we have cases similar to
             those above.  */
          if (STORE_FLAG_VALUE == -1
              && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && op1 == const0_rtx
              && (num_sign_bit_copies (op0, mode)
                  == GET_MODE_BITSIZE (mode)))
            return gen_lowpart (mode,
                                expand_compound_operation (op0));

          else if (STORE_FLAG_VALUE == -1
                   && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NEG, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && (num_sign_bit_copies (op0, mode)
                       == GET_MODE_BITSIZE (mode)))
            {
              op0 = expand_compound_operation (op0);
              return simplify_gen_unary (NOT, mode,
                                         gen_lowpart (mode, op0),
                                         mode);
            }

          /* If X is 0/1, (eq X 0) is X-1.  */
          else if (STORE_FLAG_VALUE == -1
                   && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
                   && op1 == const0_rtx
                   && mode == GET_MODE (op0)
                   && nonzero_bits (op0, mode) == 1)
            {
              op0 = expand_compound_operation (op0);
              return plus_constant (gen_lowpart (mode, op0), -1);
            }

          /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
             one bit that might be nonzero, we can convert (ne x 0) to
             (ashift x c) where C puts the bit in the sign bit.  Remove any
             AND with STORE_FLAG_VALUE when we are done, since we are only
             going to test the sign bit.  */
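          /* In SImode, for example, (ne (and X 4) 0) then becomes
             (ashift (and X 4) 29), moving bit 2 into the sign bit.  */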
          if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
              && op1 == const0_rtx
              && mode == GET_MODE (op0)
              && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
            {
              x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                        expand_compound_operation (op0),
                                        GET_MODE_BITSIZE (mode) - 1 - i);
              if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
                return XEXP (x, 0);
              else
                return x;
            }

          /* If the code changed, return a whole new comparison.  */
          if (new_code != code)
            return gen_rtx_fmt_ee (new_code, mode, op0, op1);

          /* Otherwise, keep this operation, but maybe change its operands.
             This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR).  */
          SUBST (XEXP (x, 0), op0);
          SUBST (XEXP (x, 1), op1);
        }
      break;

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
        return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
      return simplify_logical (x);

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (CONST_INT_P (XEXP (x, 1)))
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)));

      else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
        SUBST (XEXP (x, 1),
               force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
                              ((HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
                              0));
      break;

    default:
      break;
    }

  return x;
}

/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true_rtx = XEXP (x, 1);
  rtx false_rtx = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = COMPARISON_P (cond);
  rtx temp;
  int i;
  enum rtx_code false_code;
  rtx reversed;

  /* Simplify storing of the truth value.  */
  if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
    return simplify_gen_relational (true_code, mode, VOIDmode,
                                    XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p
      && true_rtx == const0_rtx && false_rtx == const_true_rtx
      && (reversed = reversed_comparison (cond, mode)))
    return reversed;

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p
      && ((false_code = reversed_comparison_code (cond, NULL))
          != UNKNOWN)
      && REG_P (XEXP (cond, 0)))
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
        {
          swapped = 1, true_code = EQ, false_code = NE;
          temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
        }

      /* If we are comparing against zero and the expression being tested has
         only a single bit that might be nonzero, that is its value when it is
         not equal to zero.  Similarly if it is known to be -1 or 0.  */
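      /* For instance, if FROM is (and X 4), its value is known to be 4
         in the arm where the comparison against zero is false and 0 in
         the other arm.  */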
      if (true_code == EQ && true_val == const0_rtx
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
        {
          false_code = EQ;
          false_val = GEN_INT (trunc_int_for_mode (nzb, GET_MODE (from)));
        }
      else if (true_code == EQ && true_val == const0_rtx
               && (num_sign_bit_copies (from, GET_MODE (from))
                   == GET_MODE_BITSIZE (GET_MODE (from))))
        {
          false_code = EQ;
          false_val = constm1_rtx;
        }

      /* Now simplify an arm if we know the value of the register in the
         branch and it is used in the arm.  Be careful due to the potential
         of locally-shared RTL.  */

      if (reg_mentioned_p (from, true_rtx))
        true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
                                      from, true_val),
                      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false_rtx))
        false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
                                   from, false_val),
                       pc_rtx, pc_rtx, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
      SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);

      true_rtx = XEXP (x, 1);
      false_rtx = XEXP (x, 2);
      true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p
      && reversed_comparison_code (cond, NULL) != UNKNOWN
      && (true_rtx == pc_rtx
          || (CONSTANT_P (true_rtx)
              && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
          || true_rtx == const0_rtx
          || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
          || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
              && !OBJECT_P (false_rtx))
          || reg_mentioned_p (true_rtx, false_rtx)
          || rtx_equal_p (false_rtx, XEXP (cond, 0))))
    {
      true_code = reversed_comparison_code (cond, NULL);
      SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
      SUBST (XEXP (x, 1), false_rtx);
      SUBST (XEXP (x, 2), true_rtx);

      temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
      cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = COMPARISON_P (cond);
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
    return true_rtx;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && !HONOR_NANS (mode)
      && rtx_equal_p (XEXP (cond, 0), false_rtx)
      && rtx_equal_p (XEXP (cond, 1), true_rtx))
    return false_rtx;
  else if (true_code == NE && ! side_effects_p (cond)
           && !HONOR_NANS (mode)
           && rtx_equal_p (XEXP (cond, 0), true_rtx)
           && rtx_equal_p (XEXP (cond, 1), false_rtx))
    return true_rtx;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */
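  /* For example, (if_then_else (ge X 0) X (neg X)) becomes (abs X).  */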

  if (GET_MODE_CLASS (mode) == MODE_INT
      && comparison_p
      && XEXP (cond, 1) == const0_rtx
      && GET_CODE (false_rtx) == NEG
      && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
      && rtx_equal_p (true_rtx, XEXP (cond, 0))
      && ! side_effects_p (true_rtx))
    switch (true_code)
      {
      case GT:
      case GE:
        return simplify_gen_unary (ABS, mode, true_rtx, mode);
      case LT:
      case LE:
        return
          simplify_gen_unary (NEG, mode,
                              simplify_gen_unary (ABS, mode, true_rtx, mode),
                              mode);
      default:
        break;
      }

  /* Look for MIN or MAX.  */
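  /* E.g., (if_then_else (gt A B) A B) becomes (smax A B).  */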

  if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true_rtx)
      && rtx_equal_p (XEXP (cond, 1), false_rtx)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
        return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
      case LE:
      case LT:
        return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
      case GEU:
      case GTU:
        return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
      case LEU:
      case LTU:
        return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
      default:
        break;
      }

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */
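  /* With STORE_FLAG_VALUE == 1, for instance, (if_then_else COND (plus Z 4) Z)
     becomes (plus Z (mult COND 4)), COND evaluating to 1 when true and
     to 0 when false.  */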

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p
      && GET_MODE_CLASS (mode) == MODE_INT
      && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true_rtx, SET);
      rtx f = make_compound_operation (false_rtx, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (unsigned int)
                     (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

      if (z)
        {
          temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
                                                 cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (MULT, m, temp,
                                      simplify_gen_binary (MULT, m, c1,
                                                           const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
          temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);

          if (extend_op != UNKNOWN)
            temp = simplify_gen_unary (extend_op, mode, temp, m);

          return temp;
        }
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */
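  /* For instance, if A is known to be 0 or 1, (if_then_else (ne A 0) 8 0)
     becomes (ashift A 3).  */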

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
           && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
          || ((num_sign_bit_copies (XEXP (cond, 0), mode)
               == GET_MODE_BITSIZE (mode))
              && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
                            gen_lowpart (mode, XEXP (cond, 0)), i);

  /* (IF_THEN_ELSE (NE REG 0) (8) (0)) is REG for nonzero_bits (REG) == 8.  */
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
      && GET_MODE (XEXP (cond, 0)) == mode
      && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
          == nonzero_bits (XEXP (cond, 0), mode)
      && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
    return XEXP (cond, 0);

  return x;
}

/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (rtx x)
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_MODE_CLASS (mode) == MODE_CC
       || GET_CODE (src) == COMPARE
       || CC0_P (dest))
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && COMPARISON_P (*cc_use)
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1, tmp;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
        op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
        op0 = src, op1 = CONST0_RTX (GET_MODE (src));

      tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
                                           op0, op1);
      if (!tmp)
        new_code = old_code;
      else if (!CONSTANT_P (tmp))
        {
          new_code = GET_CODE (tmp);
          op0 = XEXP (tmp, 0);
          op1 = XEXP (tmp, 1);
        }
      else
        {
          rtx pat = PATTERN (other_insn);
          undobuf.other_insn = other_insn;
          SUBST (*cc_use, tmp);

          /* Attempt to simplify CC user.  */
          if (GET_CODE (pat) == SET)
            {
              rtx new_rtx = simplify_rtx (SET_SRC (pat));
              if (new_rtx != NULL_RTX)
                SUBST (SET_SRC (pat), new_rtx);
            }

          /* Convert X into a no-op move.  */
          SUBST (SET_DEST (x), pc_rtx);
          SUBST (SET_SRC (x), pc_rtx);
          return x;
        }

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (new_code, &op0, &op1);

#ifdef SELECT_CC_MODE
      /* If this machine has CC modes other than CCmode, check to see if we
         need to use a different CC mode here.  */
      if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
        compare_mode = GET_MODE (op0);
      else
        compare_mode = SELECT_CC_MODE (new_code, op0, op1);

#ifndef HAVE_cc0
      /* If the mode changed, we have to change SET_DEST, the mode in the
         compare, and the mode in the place SET_DEST is used.  If SET_DEST is
         a hard register, just build new versions with the proper mode.  If it
         is a pseudo, we lose unless this is the only time we set the pseudo,
         in which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
        {
          if (can_change_dest_mode (dest, 0, compare_mode))
            {
              unsigned int regno = REGNO (dest);
              rtx new_dest;

              if (regno < FIRST_PSEUDO_REGISTER)
                new_dest = gen_rtx_REG (compare_mode, regno);
              else
                {
                  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
                  new_dest = regno_reg_rtx[regno];
                }

              SUBST (SET_DEST (x), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              other_changed = 1;

              dest = new_dest;
            }
        }
#endif  /* cc0 */
#endif  /* SELECT_CC_MODE */

      /* If the code changed, we have to build a new comparison in
         undobuf.other_insn.  */
      if (new_code != old_code)
        {
          int other_changed_previously = other_changed;
          unsigned HOST_WIDE_INT mask;
          rtx old_cc_use = *cc_use;

          SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
                                          dest, const0_rtx));
          other_changed = 1;

          /* If the only change we made was to change an EQ into an NE or
             vice versa, OP0 has only one bit that might be nonzero, and OP1
             is zero, check if changing the user of the condition code will
             produce a valid insn.  If it won't, we can keep the original code
             in that insn by surrounding our operation with an XOR.  */

          if (((old_code == NE && new_code == EQ)
               || (old_code == EQ && new_code == NE))
              && ! other_changed_previously && op1 == const0_rtx
              && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
              && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
            {
              rtx pat = PATTERN (other_insn), note = 0;

              if ((recog_for_combine (&pat, other_insn, &note) < 0
                   && ! check_asm_operands (pat)))
                {
                  *cc_use = old_cc_use;
                  other_changed = 0;

                  op0 = simplify_gen_binary (XOR, GET_MODE (op0),
                                             op0, GEN_INT (mask));
                }
            }
        }

      if (other_changed)
        undobuf.other_insn = other_insn;

      /* Otherwise, if we didn't previously have a COMPARE in the
         correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
      else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = SET_SRC (x);
        }
      /* Otherwise, update the COMPARE if needed.  */
      else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
        {
          SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
          src = SET_SRC (x);
        }
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
         compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */
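  /* E.g., on a machine where HImode and SImode occupy one word each,
     (set R:HI (subreg:HI (plus:SI A B) 0)) can become
     (set (subreg:SI R) (plus:SI A B)) under the conditions below.  */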

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && !OBJECT_P (SUBREG_REG (src))
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
           / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
               + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
        < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CANNOT_CHANGE_MODE_CLASS
      && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
            && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
                                         GET_MODE (SUBREG_REG (src)),
                                         GET_MODE (src)))
#endif
      && (REG_P (dest)
          || (GET_CODE (dest) == SUBREG
              && REG_P (SUBREG_REG (dest)))))
    {
      SUBST (SET_DEST (x),
             gen_lowpart (GET_MODE (SUBREG_REG (src)),
                                      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef HAVE_cc0
  /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
     in SRC.  */
  if (dest == cc0_rtx
      && GET_CODE (src) == SUBREG
      && subreg_lowpart_p (src)
      && (GET_MODE_BITSIZE (GET_MODE (src))
          < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
    {
      rtx inner = SUBREG_REG (src);
      enum machine_mode inner_mode = GET_MODE (inner);

      /* Here we make sure that we don't have a sign bit on.  */
      if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (inner, inner_mode)
              < ((unsigned HOST_WIDE_INT) 1
                 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
        {
          SUBST (SET_SRC (x), inner);
          src = SET_SRC (x);
        }
    }
#endif

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (src)))
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
      && SUBREG_BYTE (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && MEM_P (SUBREG_REG (src)))
    {
      SUBST (SET_SRC (x),
             gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
                            GET_MODE (src), SUBREG_REG (src)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */
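  /* Since A is known to be 0 or -1 here, (if_then_else (ne A 0) B C) is
     essentially (ior (and A B) (and (not A) C)), which is what gets
     built below.  */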

  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
                               GET_MODE (XEXP (XEXP (src, 0), 0)))
          == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
                      ? XEXP (src, 1) : XEXP (src, 2));
      rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
                   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true_rtx) == IOR
          && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
      else if (GET_CODE (true_rtx) == IOR
               && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
        term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
      else if (GET_CODE (false_rtx) == IOR
               && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
        term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;

      term2 = simplify_gen_binary (AND, GET_MODE (src),
                                   XEXP (XEXP (src, 0), 0), true_rtx);
      term3 = simplify_gen_binary (AND, GET_MODE (src),
                                   simplify_gen_unary (NOT, GET_MODE (src),
                                                       XEXP (XEXP (src, 0), 0),
                                                       GET_MODE (src)),
                                   false_rtx);

      SUBST (SET_SRC (x),
             simplify_gen_binary (IOR, GET_MODE (src),
                                  simplify_gen_binary (IOR, GET_MODE (src),
                                                       term1, term2),
                                  term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}

/* Simplify X, an AND or IOR operation, and return the simplified
   result.  */

static rtx
simplify_logical (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* We can call simplify_and_const_int only if we don't lose
         any (sign) bits when converting INTVAL (op1) to
         "unsigned HOST_WIDE_INT".  */
      if (CONST_INT_P (op1)
          && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              || INTVAL (op1) > 0))
        {
          x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
          if (GET_CODE (x) != AND)
            return x;

          op0 = XEXP (x, 0);
          op1 = XEXP (x, 1);
        }

      /* If we have any of (and (ior A B) C) or (and (xor A B) C),
         apply the distributive law and then the inverse distributive
         law to see if things simplify.  */
      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }
      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    case IOR:
      /* If we have (ior (and A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 0);
          if (result)
            return result;
        }

      if (GET_CODE (op1) == AND)
        {
          rtx result = distribute_and_simplify_rtx (x, 1);
          if (result)
            return result;
        }
      break;

    default:
      gcc_unreachable ();
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
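
/* For example, (zero_extend:SI (reg:HI R)) can be expanded to
   (and:SI (subreg:SI (reg:HI R) 0) (const_int 65535));
   make_compound_operation performs the reverse rewrite.  */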

static rtx
expand_compound_operation (rtx x)
{
  unsigned HOST_WIDE_INT pos = 0, len;
  int unsignedp = 0;
  unsigned int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
         it depends on implicitly extending the value.
         Since we don't know the right way to extend it,
         we can't tell whether the implicit way is right.

         Even for a mode that is no wider than a const_int,
         we can't win, because we need to sign extend one of its bits through
         the rest of it, and we don't know which bit.  */
      if (CONST_INT_P (XEXP (x, 0)))
        return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
         (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
         because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
         reloaded.  If not for that, MEM's would very rarely be safe.

         Reject MODEs bigger than a word, because we might not be able
         to reference a two-register group starting with an arbitrary register
         (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
         is if it is an ASM_OPERANDS), we can't do anything since we don't
         know how much masking to do.  */
      if (len == 0)
        return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;

      /* ... fall through ...  */

    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        return XEXP (x, 0);

      if (!CONST_INT_P (XEXP (x, 1))
          || !CONST_INT_P (XEXP (x, 2))
          || GET_MODE (XEXP (x, 0)) == VOIDmode)
        return x;

      /* Reject MODEs that aren't scalar integers because turning vector
         or complex modes into shifts causes problems.  */

      if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
        return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* This should stay within the object being extracted, fail otherwise.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
        return x;

      if (BITS_BIG_ENDIAN)
        pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to the cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                & ~(((unsigned HOST_WIDE_INT)
                      GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
                     >> 1))
               == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      rtx temp2 = expand_compound_operation (temp);

      /* Make sure this is a profitable operation.  */
      if (rtx_cost (x, SET, optimize_this_for_speed_p)
          > rtx_cost (temp2, SET, optimize_this_for_speed_p))
        return temp2;
      else if (rtx_cost (x, SET, optimize_this_for_speed_p)
               > rtx_cost (temp, SET, optimize_this_for_speed_p))
        return temp;
      else
        return x;
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
         know that the last value didn't have any inappropriate bits
         set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
         is a comparison and STORE_FLAG_VALUE permits.  This is like
         the first case, but it works even when GET_MODE (x) is larger
         than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && COMPARISON_P (XEXP (XEXP (x, 0), 0))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */
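  /* For instance, in SImode with BITS_BIG_ENDIAN clear,
     (sign_extract:SI X 8 4) becomes (ashiftrt (ashift X 20) 24),
     isolating bits 4 through 11 of X with sign extension.  */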

  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth + len >= pos)
    {
      enum machine_mode mode = GET_MODE (x);
      tem = gen_lowpart (mode, XEXP (x, 0));
      if (!tem || GET_CODE (tem) == CLOBBER)
        return x;
      tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
                                  tem, modewidth - pos - len);
      tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
                                  mode, tem, modewidth - len);
    }
  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
                                  simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                        GET_MODE (x),
                                                        XEXP (x, 0), pos),
                                  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}

/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */
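
/* For example, with BITS_BIG_ENDIAN clear, (set (zero_extract DEST 8 4) SRC)
   expands to the RTL equivalent of
   DEST = (DEST & ~(255 << 4)) | ((SRC & 255) << 4).  */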
6547
 
6548
static const_rtx
6549
expand_field_assignment (const_rtx x)
6550
{
6551
  rtx inner;
6552
  rtx pos;                      /* Always counts from low bit.  */
6553
  int len;
6554
  rtx mask, cleared, masked;
6555
  enum machine_mode compute_mode;
6556
 
6557
  /* Loop until we find something we can't simplify.  */
6558
  while (1)
6559
    {
6560
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6561
          && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
6562
        {
6563
          inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
6564
          len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
6565
          pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
6566
        }
6567
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
6568
               && CONST_INT_P (XEXP (SET_DEST (x), 1)))
6569
        {
6570
          inner = XEXP (SET_DEST (x), 0);
6571
          len = INTVAL (XEXP (SET_DEST (x), 1));
6572
          pos = XEXP (SET_DEST (x), 2);
6573
 
6574
          /* A constant position should stay within the width of INNER.  */
6575
          if (CONST_INT_P (pos)
6576
              && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
6577
            break;
6578
 
6579
          if (BITS_BIG_ENDIAN)
6580
            {
6581
              if (CONST_INT_P (pos))
6582
                pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
6583
                               - INTVAL (pos));
6584
              else if (GET_CODE (pos) == MINUS
6585
                       && CONST_INT_P (XEXP (pos, 1))
6586
                       && (INTVAL (XEXP (pos, 1))
6587
                           == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
              else
                pos = simplify_gen_binary (MINUS, GET_MODE (pos),
                                           GEN_INT (GET_MODE_BITSIZE (
                                                    GET_MODE (inner))
                                                    - len),
                                           pos);
            }
        }

      /* A SUBREG between two modes that occupy the same numbers of words
         can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
               /* We need SUBREGs to compute nonzero_bits properly.  */
               && nonzero_sign_valid
               && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                        + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
        {
          x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
                           gen_lowpart
                           (GET_MODE (SUBREG_REG (SET_DEST (x))),
                            SET_SRC (x)));
          continue;
        }
      else
        break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
        inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-scalar integer modes.  */
      if (! SCALAR_INT_MODE_P (compute_mode))
        {
          enum machine_mode imode;

          /* Don't do anything for vector or complex integral types.  */
          if (! FLOAT_MODE_P (compute_mode))
            break;

          /* Try to find an integral mode to pun with.  */
          imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
          if (imode == BLKmode)
            break;

          compute_mode = imode;
          inner = gen_lowpart (imode, inner);
        }

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len >= HOST_BITS_PER_WIDE_INT)
        break;

      /* Now compute the equivalent expression.  Make a copy of INNER
         for the SET_DEST in case it is a MEM into which we will substitute;
         we don't want shared RTL in that case.  */
      mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      cleared = simplify_gen_binary (AND, compute_mode,
                                     simplify_gen_unary (NOT, compute_mode,
                                       simplify_gen_binary (ASHIFT,
                                                            compute_mode,
                                                            mask, pos),
                                       compute_mode),
                                     inner);
      masked = simplify_gen_binary (ASHIFT, compute_mode,
                                    simplify_gen_binary (
                                      AND, compute_mode,
                                      gen_lowpart (compute_mode, SET_SRC (x)),
                                      mask),
                                    pos);
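      /* For example, for a LEN == 4 field at POS == 8, MASK is 0xf,
         CLEARED is INNER with bits [11:8] zeroed, and MASKED is the low
         four bits of SET_SRC shifted into place; the IOR below then
         produces the new value of INNER.  */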
      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
                       simplify_gen_binary (IOR, compute_mode,
                                            cleared, masked));
    }

  return x;
}

/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   UNSIGNEDP is nonzero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is nonzero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If nonzero,
   a STRICT_LOW_PART will be used; if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is nonzero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */
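/* As a rough illustration (the exact form depends on the target's
   insv/extv/extzv patterns): on a little-endian 32-bit target,
   make_extraction (QImode, inner, 8, NULL_RTX, 8, 1, 0, 0) asks for
   bits [15:8] of INNER as an unsigned value.  If INNER is (mem:SI ...)
   this typically becomes a QImode MEM at byte offset 1; if INNER is a
   REG it typically becomes a ZERO_EXTRACT of width 8 at position 8,
   converted to QImode.  */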
static rtx
make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
                 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
                 int in_dest, int in_compare)
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  rtx new_rtx = 0;
  rtx orig_pos_rtx = pos_rtx;
  HOST_WIDE_INT orig_pos;

  if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
         consider just the QI as the memory to extract from.
         The subreg adds or removes high bits; its mode is
         irrelevant to the meaning of this extraction,
         since POS and LEN count from the lsb.  */
      if (MEM_P (SUBREG_REG (inner)))
        is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }
  else if (GET_CODE (inner) == ASHIFT
           && CONST_INT_P (XEXP (inner, 1))
           && pos_rtx == 0 && pos == 0
           && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
    {
      /* We're extracting the least significant bits of an rtx
         (ashift X (const_int C)), where LEN > C.  Extract the
         least significant (LEN - C) bits of X, giving an rtx
         whose mode is MODE, then shift it left C times.  */
      new_rtx = make_extraction (mode, XEXP (inner, 0),
                             0, 0, len - INTVAL (XEXP (inner, 1)),
                             unsignedp, in_dest, in_compare);
      if (new_rtx != 0)
        return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && CONST_INT_P (pos_rtx))
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  */

  if (tmode != BLKmode
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
           && !MEM_P (inner)
           && (inner_mode == tmode
               || !REG_P (inner)
               || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
                                         GET_MODE_BITSIZE (inner_mode))
               || reg_truncated_to_mode (tmode, inner))
           && (! in_dest
               || (REG_P (inner)
                   && have_insn_for (STRICT_LOW_PART, tmode))))
          || (MEM_P (inner) && pos_rtx == 0
              && (pos
                  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
                     : BITS_PER_UNIT)) == 0
              /* We can't do this if we are widening INNER_MODE (it
                 may not be aligned, for one thing).  */
              && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
              && (inner_mode == tmode
                  || (! mode_dependent_address_p (XEXP (inner, 0))
                      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
         field.  If the original and current mode are the same, we need not
         adjust the offset.  Otherwise, we do if bytes big endian.

         If INNER is not a MEM, get a piece consisting of just the field
         of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (MEM_P (inner))
        {
          HOST_WIDE_INT offset;

          /* POS counts from lsb, but make OFFSET count in memory order.  */
          if (BYTES_BIG_ENDIAN)
            offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
          else
            offset = pos / BITS_PER_UNIT;
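          /* For example, an 8-bit field at POS == 8 in a 32-bit IS_MODE
             lies at byte offset 1 when little endian, but at offset
             (32 - 8 - 8) / 8 == 2 when big endian.  */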
          new_rtx = adjust_address_nv (inner, tmode, offset);
        }
      else if (REG_P (inner))
        {
          if (tmode != inner_mode)
            {
              /* We can't call gen_lowpart in a DEST since we
                 always want a SUBREG (see below) and it would sometimes
                 return a new hard register.  */
              if (pos || in_dest)
                {
                  HOST_WIDE_INT final_word = pos / BITS_PER_WORD;

                  if (WORDS_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
                    final_word = ((GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode))
                                  / UNITS_PER_WORD) - final_word;

                  final_word *= UNITS_PER_WORD;
                  if (BYTES_BIG_ENDIAN
                      && GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
                    final_word += (GET_MODE_SIZE (inner_mode)
                                   - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;

                  /* Avoid creating invalid subregs, for example when
                     simplifying (x>>32)&255.  */
                  if (!validate_subreg (tmode, inner_mode, inner, final_word))
                    return NULL_RTX;

                  new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
                }
              else
                new_rtx = gen_lowpart (tmode, inner);
            }
          else
            new_rtx = inner;
        }
      else
        new_rtx = force_to_mode (inner, tmode,
                             len >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                             0);

      /* If this extraction is going into the destination of a SET,
         make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
        return (MEM_P (new_rtx) ? new_rtx
                : (GET_CODE (new_rtx) != SUBREG
                   ? gen_rtx_CLOBBER (tmode, const0_rtx)
                   : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));

      if (mode == tmode)
        return new_rtx;

      if (CONST_INT_P (new_rtx)
          || GET_CODE (new_rtx) == CONST_DOUBLE)
        return simplify_unary_operation (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                                         mode, new_rtx, tmode);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         sign and zero extension, which are equivalent in these cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (new_rtx, tmode)
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (tmode))
                       >> 1))
                  == 0)))
        {
          rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
          rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (rtx_cost (temp, SET, optimize_this_for_speed_p)
              <= rtx_cost (temp1, SET, optimize_this_for_speed_p))
            return temp;
          return temp1;
        }

      /* Otherwise, sign- or zero-extend unless we already are in the
         proper mode.  */

      return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                             mode, new_rtx));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && unsignedp)
    return 0;
  /* If INNER is a MEM, reject this if we would be spanning bytes or
     if the position is not a constant and the length is not 1.  In all
     other cases, we would only be going outside our object in cases when
     an original shift would have been undefined.  */
  if (MEM_P (inner)
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
          || (pos_rtx != 0 && len != 1)))
    return 0;
  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
  if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
      pos_mode = mode_for_extraction (EP_insv, 2);
      extraction_mode = mode_for_extraction (EP_insv, 3);
    }

  if (! in_dest && unsignedp
      && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
      pos_mode = mode_for_extraction (EP_extzv, 3);
      extraction_mode = mode_for_extraction (EP_extzv, 0);
    }

  if (! in_dest && ! unsignedp
      && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
    {
      wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
      pos_mode = mode_for_extraction (EP_extv, 3);
      extraction_mode = mode_for_extraction (EP_extv, 0);
    }

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is the preferred mode
     for an extraction pattern's first input operand, or word_mode if there
     is none.  */
  if (!MEM_P (inner))
    wanted_inner_mode = wanted_inner_reg_mode;
  else
    {
      /* Be careful not to go beyond the extracted object and maintain the
         natural alignment of the memory.  */
      wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
      while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
             > GET_MODE_BITSIZE (wanted_inner_mode))
        {
          wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
          gcc_assert (wanted_inner_mode != VOIDmode);
        }

      /* If we have to change the mode of memory and cannot, the desired mode
         is EXTRACTION_MODE.  */
      if (inner_mode != wanted_inner_mode
          && (mode_dependent_address_p (XEXP (inner, 0))
              || MEM_VOLATILE_P (inner)
              || pos_rtx))
        wanted_inner_mode = extraction_mode;
    }

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
         BITS_BIG_ENDIAN style.  If position is constant, compute new
         position.  Otherwise, build subtraction.
         Note that POS is relative to the mode of the original argument.
         If it's a MEM we need to recompute POS relative to that.
         However, if we're extracting from (or inserting into) a register,
         we want to recompute POS relative to wanted_inner_mode.  */
      int width = (MEM_P (inner)
                   ? GET_MODE_BITSIZE (is_mode)
                   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
        pos = width - len - pos;
      else
        pos_rtx
          = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
         Note that it can only be less than 0 if !MEM_P (inner).  */
    }
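  /* For example, with a 32-bit WIDTH, a LEN == 8 field at POS == 0
     (counted from the lsb) becomes POS == 24 under BITS_BIG_ENDIAN
     numbering, where bit 0 is the most significant bit.  */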
  /* If INNER has a wider mode, and this is a constant extraction, try to
     make it smaller and adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && inner_mode != wanted_inner_mode
      && ! pos_rtx
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && MEM_P (inner)
      && ! mode_dependent_address_p (XEXP (inner, 0))
      && ! MEM_VOLATILE_P (inner))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
         endian in both bits and bytes or little endian in bits and bytes.
         If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
         adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* We can now move to the desired byte.  */
      offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
                * GET_MODE_SIZE (wanted_inner_mode);
      pos %= GET_MODE_BITSIZE (wanted_inner_mode);

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
          && is_mode != wanted_inner_mode)
        offset = (GET_MODE_SIZE (is_mode)
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      inner = adjust_address_nv (inner, wanted_inner_mode, offset);
    }

  /* If INNER is not memory, get it into the proper mode.  If we are changing
     its mode, POS must be a constant and smaller than the size of the new
     mode.  */
  else if (!MEM_P (inner))
    {
      /* On the LHS, don't create paradoxical subregs implicitly truncating
         the register unless TRULY_NOOP_TRUNCATION.  */
      if (in_dest
          && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (inner)),
                                     GET_MODE_BITSIZE (wanted_inner_mode)))
        return NULL_RTX;
      if (GET_MODE (inner) != wanted_inner_mode
          && (pos_rtx != 0
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
        return NULL_RTX;

      if (orig_pos < 0)
        return NULL_RTX;

      inner = force_to_mode (inner, wanted_inner_mode,
                             pos_rtx
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
                             ? ~(unsigned HOST_WIDE_INT) 0
                             : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
                                << orig_pos),
                             0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         SIGN_EXTENSION and ZERO_EXTENSION, which are equivalent in these
         cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
                   & ~(((unsigned HOST_WIDE_INT)
                        GET_MODE_MASK (GET_MODE (pos_rtx)))
                       >> 1))
                  == 0)))
        {
          rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

          /* Prefer ZERO_EXTENSION, since it gives more information to
             backends.  */
          if (rtx_cost (temp1, SET, optimize_this_for_speed_p)
              < rtx_cost (temp, SET, optimize_this_for_speed_p))
            temp = temp1;
        }
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new_rtx = gen_lowpart (mode, new_rtx);

  return new_rtx;
}

/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
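/* For example, with COUNT == 3, (plus (ashift X (const_int 3))
   (const_int 24)) is rewritten as (plus X (const_int 3)), since the
   low three bits of 24 are zero and the whole expression is therefore
   the inner PLUS shifted left by 3.  */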
static rtx
extract_left_shift (rtx x, int count)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_unary (code, mode, tem, mode);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (CONST_INT_P (XEXP (x, 1))
          && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return simplify_gen_binary (code, mode, tem,
                                    GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the VAX that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or minus, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
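/* For example, (and (lshiftrt X (const_int 8)) (const_int 255)) is
   turned into (zero_extract X (const_int 8) (const_int 8)), and inside
   an address (ashift X (const_int 2)) becomes (mult X (const_int 4)).  */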
static rtx
make_compound_operation (rtx x, enum rtx_code in_code)
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i, j;
  rtx new_rtx = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
               : ((code == COMPARE || COMPARISON_P (x))
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     nonzero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
      if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (x, 0), next_code);
          new_rtx = gen_rtx_MULT (mode, new_rtx,
                              GEN_INT ((HOST_WIDE_INT) 1
                                       << INTVAL (XEXP (x, 1))));
        }
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (!CONST_INT_P (XEXP (x, 1)))
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                             XEXP (x, 1)),
                                gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                             XEXP (x, 1)));
          new_rtx = make_compound_operation (new_rtx, in_code);
        }
      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
               && CONST_INT_P (XEXP (XEXP (x, 0), 1))
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
        {
          new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new_rtx = make_extraction (mode, new_rtx,
                                 (GET_MODE_BITSIZE (mode)
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
        }
      /* On machines without logical shifts, if the operand of the AND is
         a logical shift and our mask turns off all the propagated sign
         bits, we can replace the logical shift with an arithmetic shift.  */
      else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
               && !have_insn_for (LSHIFTRT, mode)
               && have_insn_for (ASHIFTRT, mode)
               && CONST_INT_P (XEXP (XEXP (x, 0), 1))
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
               && mode_width <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
            SUBST (XEXP (x, 0),
                   gen_rtx_ASHIFTRT (mode,
                                     make_compound_operation
                                     (XEXP (XEXP (x, 0), 0), next_code),
                                     XEXP (XEXP (x, 0), 1)));
        }

      /* If the constant is one less than a power of two, this might be
         representable by an extraction even if no shift is present.
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
         we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        new_rtx = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
         convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
               && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
        new_rtx = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (have_insn_for (ASHIFTRT, mode)
          && ! have_insn_for (LSHIFTRT, mode)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode)
              & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
        {
          new_rtx = gen_rtx_ASHIFTRT (mode,
                                  make_compound_operation (XEXP (x, 0),
                                                           next_code),
                                  XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
      if (CONST_INT_P (rhs)
          && GET_CODE (lhs) == ASHIFT
          && CONST_INT_P (XEXP (lhs, 1))
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
          && INTVAL (rhs) < mode_width)
        {
          new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
          new_rtx = make_extraction (mode, new_rtx,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
          break;
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (!OBJECT_P (lhs)
          && ! (GET_CODE (lhs) == SUBREG
                && (OBJECT_P (SUBREG_REG (lhs))))
          && CONST_INT_P (rhs)
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && INTVAL (rhs) < mode_width
          && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);

      {
        rtx simplified = simplify_subreg (mode, tem, GET_MODE (SUBREG_REG (x)),
                                          SUBREG_BYTE (x));

        if (simplified)
          tem = simplified;

        if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
            && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
            && subreg_lowpart_p (x))
          {
            rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
                                       0);

            /* If we have something other than a SUBREG, we might have
               done an expansion, so rerun ourselves.  */
            if (GET_CODE (newer) != SUBREG)
              newer = make_compound_operation (newer, in_code);

            /* force_to_mode can expand compounds.  If it just re-expanded the
               compound use gen_lowpart instead to convert to the desired
               mode.  */
            if (rtx_equal_p (newer, x))
              return gen_lowpart (GET_MODE (x), tem);

            return newer;
          }

        if (simplified)
          return tem;
      }
      break;

    default:
      break;
    }

  if (new_rtx)
    {
      x = gen_lowpart (mode, new_rtx);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new_rtx = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new_rtx);
      }
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        {
          new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
          SUBST (XVECEXP (x, i, j), new_rtx);
        }

  /* If this is a commutative operation, the changes to the operands
     may have made it noncanonical.  */
  if (COMMUTATIVE_ARITH_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      tem = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), tem);
    }

  return x;
}

/* Given M, see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
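/* For example, M == 0x0ff0 selects an 8-bit field starting at bit 4,
   so we return 4 and set *PLEN to 8; M == 0x0ff1 has no such contiguous
   field and yields -1.  */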
static int
get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & -m);
  int len = 0;

  if (pos >= 0)
    /* Now shift off the low-order zero bits and see if we have a
       power of two minus 1.  */
    len = exact_log2 ((m >> pos) + 1);

  if (len <= 0)
    pos = -1;

  *plen = len;
  return pos;
}

/* If X refers to a register that equals REG in value, replace these
   references with REG.  */
static rtx
canon_reg_for_combine (rtx x, rtx reg)
{
  rtx op0, op1, op2;
  const char *fmt;
  int i;
  bool copied;

  enum rtx_code code = GET_CODE (x);
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      if (op0 != XEXP (x, 0))
        return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
                                   GET_MODE (reg));
      break;

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      break;

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
                                        GET_MODE (op0), op0, op1);
      break;

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      op0 = canon_reg_for_combine (XEXP (x, 0), reg);
      op1 = canon_reg_for_combine (XEXP (x, 1), reg);
      op2 = canon_reg_for_combine (XEXP (x, 2), reg);
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
        return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
                                     GET_MODE (op0), op0, op1, op2);

    case RTX_OBJ:
      if (REG_P (x))
        {
          if (rtx_equal_p (get_last_value (reg), x)
              || rtx_equal_p (reg, get_last_value (x)))
            return reg;
          else
            break;
        }

      /* fall through */

    default:
      fmt = GET_RTX_FORMAT (code);
      copied = false;
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (fmt[i] == 'e')
          {
            rtx op = canon_reg_for_combine (XEXP (x, i), reg);
            if (op != XEXP (x, i))
              {
                if (!copied)
                  {
                    copied = true;
                    x = copy_rtx (x);
                  }
                XEXP (x, i) = op;
              }
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              {
                rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
                if (op != XVECEXP (x, i, j))
                  {
                    if (!copied)
                      {
                        copied = true;
                        x = copy_rtx (x);
                      }
                    XVECEXP (x, i, j) = op;
                  }
              }
          }

      break;
    }

  return x;
}

/* Return X converted to MODE.  If the value is already truncated to
   MODE we can just return a subreg even though in the general case we
   would need an explicit truncation.  */

static rtx
gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x)
      && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
      && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                 GET_MODE_BITSIZE (GET_MODE (x)))
      && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
    {
      /* Bit-cast X into an integer mode.  */
      if (!SCALAR_INT_MODE_P (GET_MODE (x)))
        x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x);
      x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode),
                              x, GET_MODE (x));
    }

  return gen_lowpart (mode, x);
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
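/* For example, force_to_mode ((and Y (const_int 255)), SImode, 0x0f, 0)
   may return just Y: the caller promises to look only at the low four
   bits, so the AND case below deletes the now-redundant mask of 255.  */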
static rtx
force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
               int just_select)
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && have_insn_for (code, mode))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
    fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
  else
    fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
                   - 1);
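
  /* For example, MASK == 0x0c gives FULLER_MASK == 0x0f, since an
     addition feeding bits 2 and 3 can receive carries out of bits 0
     and 1.  */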
  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
    x = const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (CONST_INT_P (x))
    {
      if (SCALAR_INT_MODE_P (mode))
        return gen_int_mode (INTVAL (x) & mask, mode);
      else
        {
          x = GEN_INT (INTVAL (x) & mask);
          return gen_lowpart_common (mode, x);
        }
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
    return gen_lowpart (mode, x);

  /* We can ignore the effect of a SUBREG if it narrows the mode or
     if the constant masks to zero all the bits the mode doesn't have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && ((GET_MODE_SIZE (GET_MODE (x))
           < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          || (0 == (mask
                    & GET_MODE_MASK (GET_MODE (x))
                    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
    return force_to_mode (SUBREG_REG (x), mode, mask, next_select);

  /* The arithmetic simplifications here only work for scalar integer modes.  */
  if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x)))
    return gen_lowpart_or_truncate (mode, x);

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, next_select);
      break;

    case TRUNCATE:
      /* Similarly for a truncate.  */
      return force_to_mode (XEXP (x, 0), mode, mask, next_select);

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
              && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
                  == mask))
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = simplify_gen_binary (AND, GET_MODE (x),
                                       XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET, optimize_this_for_speed_p)
                  < rtx_cost (x, SET, optimize_this_for_speed_p))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */
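      /* For example, if M == -8 and the low three bits of FOO are known
         to be zero, C1 == 10 can be masked down to 8, turning
         (plus FOO (const_int 10)) into (plus FOO (const_int 8)).  */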
7761
 
7762
      {
7763
        unsigned int width = GET_MODE_BITSIZE (mode);
7764
        unsigned HOST_WIDE_INT smask = mask;
7765
 
7766
        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7767
           number, sign extend it.  */
7768
 
7769
        if (width < HOST_BITS_PER_WIDE_INT
7770
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7771
          smask |= (HOST_WIDE_INT) -1 << width;
7772
 
7773
        if (CONST_INT_P (XEXP (x, 1))
7774
            && exact_log2 (- smask) >= 0
7775
            && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7776
            && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7777
          return force_to_mode (plus_constant (XEXP (x, 0),
7778
                                               (INTVAL (XEXP (x, 1)) & smask)),
7779
                                mode, smask, next_select);
7780
      }
7781
 
7782
      /* ... fall through ...  */
7783
 
7784
    case MULT:
7785
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
7786
         most significant bit in MASK since carries from those bits will
7787
         affect the bits we are interested in.  */
7788
      mask = fuller_mask;
7789
      goto binop;
7790
 
7791
    case MINUS:
7792
      /* If X is (minus C Y) where C's least set bit is larger than any bit
7793
         in the mask, then we may replace with (neg Y).  */
7794
      if (CONST_INT_P (XEXP (x, 0))
7795
          && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7796
                                        & -INTVAL (XEXP (x, 0))))
7797
              > mask))
7798
        {
7799
          x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7800
                                  GET_MODE (x));
7801
          return force_to_mode (x, mode, mask, next_select);
7802
        }
7803
 
7804
      /* Similarly, if C contains every bit in the fuller_mask, then we may
7805
         replace with (not Y).  */
7806
      if (CONST_INT_P (XEXP (x, 0))
7807
          && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
7808
              == INTVAL (XEXP (x, 0))))
7809
        {
7810
          x = simplify_gen_unary (NOT, GET_MODE (x),
7811
                                  XEXP (x, 1), GET_MODE (x));
7812
          return force_to_mode (x, mode, mask, next_select);
7813
        }
7814
 
7815
      mask = fuller_mask;
7816
      goto binop;
7817
 
7818
    case IOR:
7819
    case XOR:
7820
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7821
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7822
         operation which may be a bitfield extraction.  Ensure that the
7823
         constant we form is not wider than the mode of X.  */
7824
 
7825
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7826
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
7827
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7828
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7829
          && CONST_INT_P (XEXP (x, 1))
7830
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
7831
               + floor_log2 (INTVAL (XEXP (x, 1))))
7832
              < GET_MODE_BITSIZE (GET_MODE (x)))
7833
          && (INTVAL (XEXP (x, 1))
7834
              & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
7835
        {
7836
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
7837
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
7838
          temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7839
                                      XEXP (XEXP (x, 0), 0), temp);
7840
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7841
                                   XEXP (XEXP (x, 0), 1));
7842
          return force_to_mode (x, mode, mask, next_select);
7843
        }
7844
 
7845
    binop:
7846
      /* For most binary operations, just propagate into the operation and
7847
         change the mode if we have an operation of that mode.  */
7848
 
7849
      op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
7850
      op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
7851
 
7852
      /* If we ended up truncating both operands, truncate the result of the
7853
         operation instead.  */
7854
      if (GET_CODE (op0) == TRUNCATE
7855
          && GET_CODE (op1) == TRUNCATE)
7856
        {
7857
          op0 = XEXP (op0, 0);
7858
          op1 = XEXP (op1, 0);
7859
        }
7860
 
7861
      op0 = gen_lowpart_or_truncate (op_mode, op0);
7862
      op1 = gen_lowpart_or_truncate (op_mode, op1);
7863
 
7864
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7865
        x = simplify_gen_binary (code, op_mode, op0, op1);
7866
      break;
7867
 
7868
    case ASHIFT:
7869
      /* For left shifts, do the same, but just for the first operand.
7870
         However, we cannot do anything with shifts where we cannot
7871
         guarantee that the counts are smaller than the size of the mode
7872
         because such a count will have a different meaning in a
7873
         wider mode.  */
7874
 
7875
      if (! (CONST_INT_P (XEXP (x, 1))
7876
             && INTVAL (XEXP (x, 1)) >= 0
7877
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7878
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7879
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
7880
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
7881
        break;
7882
 
7883
      /* If the shift count is a constant and we can do arithmetic in
7884
         the mode of the shift, refine which bits we need.  Otherwise, use the
7885
         conservative form of the mask.  */
7886
      if (CONST_INT_P (XEXP (x, 1))
7887
          && INTVAL (XEXP (x, 1)) >= 0
7888
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7889
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7890
        mask >>= INTVAL (XEXP (x, 1));
7891
      else
7892
        mask = fuller_mask;
7893
 
7894
      op0 = gen_lowpart_or_truncate (op_mode,
7895
                                     force_to_mode (XEXP (x, 0), op_mode,
7896
                                                    mask, next_select));
7897
 
7898
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
7899
        x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
7900
      break;
7901
 
7902
    case LSHIFTRT:
7903
      /* Here we can only do something if the shift count is a constant,
7904
         this shift constant is valid for the host, and we can do arithmetic
7905
         in OP_MODE.  */
7906
 
7907
      if (CONST_INT_P (XEXP (x, 1))
7908
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
7909
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7910
        {
7911
          rtx inner = XEXP (x, 0);
7912
          unsigned HOST_WIDE_INT inner_mask;
7913
 
7914
          /* Select the mask of the bits we need for the shift operand.  */
7915
          inner_mask = mask << INTVAL (XEXP (x, 1));
7916
 
7917
          /* We can only change the mode of the shift if we can do arithmetic
7918
             in the mode of the shift and INNER_MASK is no wider than the
7919
             width of X's mode.  */
7920
          if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
7921
            op_mode = GET_MODE (x);
7922
 
7923
          inner = force_to_mode (inner, op_mode, inner_mask, next_select);
7924
 
7925
          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
7926
            x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
7927
        }
7928
 
7929
      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7930
         shift and AND produces only copies of the sign bit (C2 is one less
7931
         than a power of two), we can do this with just a shift.  */
7932
 
7933
      if (GET_CODE (x) == LSHIFTRT
7934
          && CONST_INT_P (XEXP (x, 1))
7935
          /* The shift puts one of the sign bit copies in the least significant
7936
             bit.  */
7937
          && ((INTVAL (XEXP (x, 1))
7938
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7939
              >= GET_MODE_BITSIZE (GET_MODE (x)))
7940
          && exact_log2 (mask + 1) >= 0
7941
          /* Number of bits left after the shift must be more than the mask
7942
             needs.  */
7943
          && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7944
              <= GET_MODE_BITSIZE (GET_MODE (x)))
7945
          /* Must be more sign bit copies than the mask needs.  */
7946
          && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
7947
              >= exact_log2 (mask + 1)))
7948
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7949
                                 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7950
                                          - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((unsigned HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);
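      /* For instance (illustrative): in a 32-bit mode with MASK 0x80000000,
         the sign bit of (ashiftrt X C) always equals the sign bit of X,
         so we can recurse directly into X.  */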

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */

      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~(HOST_WIDE_INT) 0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~nonzero) == 0)
            {
              x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
                                        XEXP (x, 0), INTVAL (XEXP (x, 1)));
              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, next_select);
            }

          else if ((i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                  (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                   GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, next_select);
            }
        }
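      /* Illustrative example, assuming a 32-bit mode: with MASK 0x10, a
         single bit lying among the sign bit copies, the second branch
         rewrites (ashiftrt X C) as (lshiftrt X 27), which moves the sign
         bit of X into bit 4, the only position the caller will look at.  */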

      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                 XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && CONST_INT_P (temp))
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
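      /* For example (illustrative): with MASK 0x8, bit 3 of (neg X) also
         depends on bits 2..0 of X through the carry chain, so we must ask
         the operand for the fuller mask 0xF.  */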
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && CONST_INT_P (XEXP (XEXP (x, 0), 1))
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
                               GET_MODE (x));
          temp = simplify_gen_binary (XOR, GET_MODE (x),
                                      XEXP (XEXP (x, 0), 0), temp);
          x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
                                   temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, next_select);
        }
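      /* Illustrative example, assuming a 32-bit mode: under MASK 0xF,
         (not (lshiftrt X 28)) becomes (lshiftrt (xor X 0xF0000000) 28);
         complementing the four live bits before the shift is equivalent
         to complementing them after it.  */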

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_or_truncate (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = simplify_gen_unary (code, op_mode, op0, op_mode);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && GET_MODE (XEXP (x, 0)) == mode
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && (nonzero_bits (XEXP (x, 0), mode)
              == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
        return force_to_mode (XEXP (x, 0), mode, mask, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart_or_truncate (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_or_truncate (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_or_truncate (mode, x);
}

/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, set *PTRUE to the value X has when the rtx
   being returned is nonzero, and set *PFALSE to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */
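/* For example (illustrative): given X = (ne R (const_int 0)), the function
   below returns R with *PTRUE = const_true_rtx and *PFALSE = const0_rtx.  */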

static rtx
if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && XEXP (x, 1) == const0_rtx)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (UNARY_P (x)
           && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
      *pfalse = simplify_gen_unary (code, mode, false0,
                                    GET_MODE (XEXP (x, 0)));
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (BINARY_P (x))
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          /* If if_then_else_cond returned zero, then true/false are the
             same rtl.  We must copy one of them to prevent invalid rtl
             sharing.  */
          if (cond0 == 0)
            true0 = copy_rtx (true0);
          else if (cond1 == 0)
            true1 = copy_rtx (true1);

          if (COMPARISON_P (x))
            {
              *ptrue = simplify_gen_relational (code, mode, VOIDmode,
                                                true0, true1);
              *pfalse = simplify_gen_relational (code, mode, VOIDmode,
                                                 false0, false1);
            }
          else
            {
              *ptrue = simplify_gen_binary (code, mode, true0, true1);
              *pfalse = simplify_gen_binary (code, mode, false0, false1);
            }

          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is nonzero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */
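      /* Illustrative example, with STORE_FLAG_VALUE == 1: in
         (ior (mult (lt A B) X) (mult (ge A B) Y)) exactly one product is
         nonzero at a time, so X is the true value, Y the false value, and
         (lt A B) the condition returned.  */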

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = simplify_gen_binary (MULT, mode,
                                             (code == MINUS
                                              ? simplify_gen_unary (NEG, mode,
                                                                    op1, mode)
                                              : op1),
                                             const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (COMPARISON_P (cond0)
              && COMPARISON_P (cond1)
              && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reversed_comparison_code (cond1, NULL))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a SUBREG, we can narrow both the true and false values
     of the inner expression, if there is a condition.  */
  else if (code == SUBREG
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      true0 = simplify_gen_subreg (mode, true0,
                                   GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      false0 = simplify_gen_subreg (mode, false0,
                                    GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
      if (true0 && false0)
        {
          *ptrue = true0;
          *pfalse = false0;
          return cond0;
        }
    }

  /* If X is a constant, this isn't special and will cause confusion
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
     will be least confusing to the rest of the compiler.  */
  else if (mode == BImode)
    {
      *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
      return x;
    }

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (x == constm1_rtx || x == const0_rtx
           || (mode != VOIDmode
               && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (SCALAR_INT_MODE_P (mode)
           && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
           && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}

/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */
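/* For example (illustrative): with COND == EQ, REG a non-floating-point
   register R and VAL = (const_int 4), known_cond applied to R simply
   returns (const_int 4).  */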

static rtx
known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  const char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  /* If either operand of the condition is a floating point value,
     then we have to avoid collapsing an EQ comparison.  */
  if (cond == EQ
      && rtx_equal_p (x, reg)
      && ! FLOAT_MODE_P (GET_MODE (x))
      && ! FLOAT_MODE_P (GET_MODE (val)))
    return val;

  if (cond == UNEQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
        return XEXP (x, 0);
      case LT:  case LE:
        return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
                                   XEXP (x, 0),
                                   GET_MODE (XEXP (x, 0)));
      default:
        break;
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
    {
      if (rtx_equal_p (XEXP (x, 0), val))
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
        {
          if (COMPARISON_P (x))
            {
              if (comparison_dominates_p (cond, code))
                return const_true_rtx;

              code = reversed_comparison_code (x, NULL);
              if (code != UNKNOWN
                  && comparison_dominates_p (cond, code))
                return const0_rtx;
              else
                return x;
            }
          else if (code == SMAX || code == SMIN
                   || code == UMIN || code == UMAX)
            {
              int unsignedp = (code == UMIN || code == UMAX);

              /* Do not reverse the condition when it is NE or EQ.
                 This is because we cannot conclude anything about
                 the value of 'SMAX (x, y)' when x is not equal to y,
                 but we can when x equals y.  */
              if ((code == SMAX || code == UMAX)
                  && ! (cond == EQ || cond == NE))
                cond = reverse_condition (cond);

              switch (cond)
                {
                case GE:   case GT:
                  return unsignedp ? x : XEXP (x, 1);
                case LE:   case LT:
                  return unsignedp ? x : XEXP (x, 0);
                case GEU:  case GTU:
                  return unsignedp ? XEXP (x, 1) : x;
                case LEU:  case LTU:
                  return unsignedp ? XEXP (x, 0) : x;
                default:
                  break;
                }
            }
        }
    }
  else if (code == SUBREG)
    {
      enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
      rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);

      if (SUBREG_REG (x) != r)
        {
          /* We must simplify subreg here, before we lose track of the
             original inner_mode.  */
          new_rtx = simplify_subreg (GET_MODE (x), r,
                                     inner_mode, SUBREG_BYTE (x));
          if (new_rtx)
            return new_rtx;
          else
            SUBST (SUBREG_REG (x), r);
        }

      return x;
    }
  /* We don't have to handle SIGN_EXTEND here, because even in the
     case of replacing something with a modeless CONST_INT, a
     CONST_INT is already (supposed to be) a valid sign extension for
     its narrower mode, which implies it's already properly
     sign-extended for the wider mode.  Now, for ZERO_EXTEND, the
     story is different.  */
  else if (code == ZERO_EXTEND)
    {
      enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
      rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);

      if (XEXP (x, 0) != r)
        {
          /* We must simplify the zero_extend here, before we lose
             track of the original inner_mode.  */
          new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                              r, inner_mode);
          if (new_rtx)
            return new_rtx;
          else
            SUBST (XEXP (x, 0), r);
        }

      return x;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
                                                cond, reg, val));
    }

  return x;
}

/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (rtx x, rtx y)
{
  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (MEM_P (x) && GET_CODE (y) == SUBREG
      && MEM_P (SUBREG_REG (y))
      && rtx_equal_p (SUBREG_REG (y),
                      gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (MEM_P (y) && GET_CODE (x) == SUBREG
      && MEM_P (SUBREG_REG (x))
      && rtx_equal_p (SUBREG_REG (x),
                      gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  /* We used to see if get_last_value of X and Y were the same but that's
     not correct.  In one direction, we'll cause the assignment to have
     the wrong destination and in the other, we'll import a register into
     this insn that might already have been dead.  So fail if none of the
     above cases are true.  */
  return 0;
}

/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (rtx x)
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  HOST_WIDE_INT pos;
  unsigned HOST_WIDE_INT len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && CONST_INT_P (XEXP (XEXP (src, 0), 0))
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }
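  /* Illustrative example: (set R (and (rotate (const_int -2) P) R))
     clears only bit P of R, so it becomes
     (set (zero_extract R (const_int 1) P) (const_int 0)).  */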

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
      && subreg_lowpart_p (XEXP (src, 0))
      && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
      && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
      && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
      && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
      && XEXP (XEXP (src, 0), 0) == const1_rtx
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }

  /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
     SRC is an AND with all bits of that field set, then we can discard
     the AND.  */
  if (GET_CODE (dest) == ZERO_EXTRACT
      && CONST_INT_P (XEXP (dest, 1))
      && GET_CODE (src) == AND
      && CONST_INT_P (XEXP (src, 1)))
    {
      HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
      unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
      unsigned HOST_WIDE_INT ze_mask;

      if (width >= HOST_BITS_PER_WIDE_INT)
        ze_mask = -1;
      else
        ze_mask = ((unsigned HOST_WIDE_INT) 1 << width) - 1;

      /* Complete overlap.  We can remove the source AND.  */
      if ((and_mask & ze_mask) == ze_mask)
        return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));

      /* Partial overlap.  We can reduce the source AND.  */
      if ((and_mask & ze_mask) != and_mask)
        {
          mode = GET_MODE (src);
          src = gen_rtx_AND (mode, XEXP (src, 0),
                             gen_int_mode (and_mask & ze_mask, mode));
          return gen_rtx_SET (VOIDmode, dest, src);
        }
    }
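  /* For instance (illustrative): storing (and S 0xFF) into an 8-bit wide
     ZERO_EXTRACT destination is a complete overlap, so the AND is
     redundant and we store S directly.  */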

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && CONST_INT_P (XEXP (rhs, 1))
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
           && CONST_INT_P (XEXP (lhs, 1))
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
      || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                     GET_MODE (src),
                                                     other, pos),
                               dest);
  src = force_to_mode (src, mode,
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
                       ? ~(unsigned HOST_WIDE_INT) 0
                       : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
                       0);

  /* If SRC is masked by an AND that does not make a difference in
     the value being stored, strip it.  */
  if (GET_CODE (assign) == ZERO_EXTRACT
      && CONST_INT_P (XEXP (assign, 1))
      && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
      && GET_CODE (src) == AND
      && CONST_INT_P (XEXP (src, 1))
      && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
          == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
    src = XEXP (src, 0);

  return gen_rtx_SET (VOIDmode, assign, src);
}

/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */
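/* For example (illustrative): (ior (and A C) (and B C)) is rewritten as
   (and (ior A B) C), factoring out the common operand C.  */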

static rtx
apply_distributive_law (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  enum rtx_code inner_code;
  rtx lhs, rhs, other;
  rtx tem;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0);
  rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (OBJECT_P (lhs) || OBJECT_P (rhs))
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
        return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
        return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations,
         provided the inner modes and byte offsets are the same, this
         is an extraction of a low-order part, we don't convert an fp
         operation to int or vice versa, this is not a vector mode,
         and we would not be converting a single-word operation into a
         multi-word operation.  The latter test is not required, but
         it prevents generating unneeded multi-word operations.  Some
         of the previous tests are redundant given the latter test,
         but are retained because they are required for correctness.

         We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
          || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
          || ! subreg_lowpart_p (lhs)
          || (GET_MODE_CLASS (GET_MODE (lhs))
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
          || (GET_MODE_SIZE (GET_MODE (lhs))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
          || VECTOR_MODE_P (GET_MODE (lhs))
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
          /* Result might need to be truncated.  Don't change mode if
             explicit truncation is needed.  */
          || !TRULY_NOOP_TRUNCATION
               (GET_MODE_BITSIZE (GET_MODE (x)),
                GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
        return x;

      tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
                                 SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (COMMUTATIVE_ARITH_P (lhs)
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (COMMUTATIVE_ARITH_P (lhs)
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (COMMUTATIVE_ARITH_P (lhs)
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | c) ^ (b | c) -> (a ^ b) & ~c  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return simplify_gen_binary (inner_code, GET_MODE (x),
                              apply_distributive_law (tem), other);
}

/* See if X is of the form (* (+ A B) C), and if so convert to
   (+ (* A C) (* B C)) and try to simplify.

   Most of the time, this results in no change.  However, if some of
   the operands are the same or inverses of each other, simplifications
   will result.

   For example, (and (ior A B) (not B)) can occur as the result of
   expanding a bit field assignment.  When we apply the distributive
   law to this, we get (ior (and A (not B)) (and B (not B))),
   which then simplifies to (and A (not B)).

   Note that no checks happen on the validity of applying the inverse
   distributive law.  This is pointless since we can do it in the
   few places where this routine is called.

   N is the index of the term that is decomposed (the arithmetic operation,
   i.e. (+ A B) in the first example above).  !N is the index of the term that
   is distributed, i.e. of C in the first example above.  */
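/* Illustrative example: called on X = (and (ior A B) C) with N == 0, this
   forms (ior (and A C) (and B C)), lets apply_distributive_law refold it,
   and returns the result only if it is cheaper than X.  */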
static rtx
distribute_and_simplify_rtx (rtx x, int n)
{
  enum machine_mode mode;
  enum rtx_code outer_code, inner_code;
  rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;

  /* Distributivity is not true for floating point as it can change the
     value.  So we don't do it unless -funsafe-math-optimizations.  */
  if (FLOAT_MODE_P (GET_MODE (x))
      && ! flag_unsafe_math_optimizations)
    return NULL_RTX;

  decomposed = XEXP (x, n);
  if (!ARITHMETIC_P (decomposed))
    return NULL_RTX;

  mode = GET_MODE (x);
  outer_code = GET_CODE (x);
  distributed = XEXP (x, !n);

  inner_code = GET_CODE (decomposed);
  inner_op0 = XEXP (decomposed, 0);
  inner_op1 = XEXP (decomposed, 1);

  /* Special case (and (xor B C) (not A)), which is equivalent to
     (xor (ior A B) (ior A C))  */
  if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
    {
      distributed = XEXP (distributed, 0);
      outer_code = IOR;
    }

  if (n == 0)
    {
      /* Distribute the second term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
      new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
    }
  else
    {
      /* Distribute the first term.  */
      new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
      new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
    }

  tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
                                                     new_op0, new_op1));
  if (GET_CODE (tmp) != outer_code
      && rtx_cost (tmp, SET, optimize_this_for_speed_p)
         < rtx_cost (x, SET, optimize_this_for_speed_p))
    return tmp;

  return NULL_RTX;
}

/* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.  Return an equivalent form, if different from (and VAROP
   (const_int CONSTOP)).  Otherwise, return NULL_RTX.  */

static rtx
simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
                          unsigned HOST_WIDE_INT constop)
{
  unsigned HOST_WIDE_INT nonzero;
  unsigned HOST_WIDE_INT orig_constop;
  rtx orig_varop;
  int i;

  orig_varop = varop;
  orig_constop = constop;
  if (GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* Simplify VAROP knowing that we will only be looking at some of the
     bits in it.

     Note by passing in CONSTOP, we guarantee that the bits not set in
     CONSTOP are not significant and will never be examined.  We must
     ensure that is the case by explicitly masking out those bits
     before returning.  */
  varop = force_to_mode (varop, mode, constop, 0);

  /* If VAROP is a CLOBBER, we will fail so return it.  */
  if (GET_CODE (varop) == CLOBBER)
    return varop;

  /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
     to VAROP and return the new constant.  */
  if (CONST_INT_P (varop))
    return gen_int_mode (INTVAL (varop) & constop, mode);

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
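  /* Illustrative example: if X is known to be 0 or 1, (and (neg X) 8)
     is 0 when X is 0 and 8 when X is 1, which is exactly (ashift X 3).  */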

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart
        (mode,
         apply_distributive_law
         (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
                               simplify_and_const_int (NULL_RTX,
                                                       GET_MODE (varop),
                                                       XEXP (varop, 0),
                                                       constop),
                               simplify_and_const_int (NULL_RTX,
                                                       GET_MODE (varop),
                                                       XEXP (varop, 1),
                                                       constop))));

  /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
     the AND and see if one of the operands simplifies to zero.  If so, we
     may eliminate it.  */

  if (GET_CODE (varop) == PLUS
      && exact_log2 (constop + 1) >= 0)
    {
      rtx o0, o1;

      o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
      o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
      if (o0 == const0_rtx)
        return o1;
      if (o1 == const0_rtx)
        return o0;
    }
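  /* For example (illustrative): with CONSTOP 15 and VAROP
     (plus (ashift X 4) Y), the shifted operand contributes nothing to the
     low four bits, so the whole AND reduces to (and Y 15).  */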

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    return varop;

  if (varop == orig_varop && constop == orig_constop)
    return NULL_RTX;

  /* Otherwise, return an AND.  */
  return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
}


/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
                        unsigned HOST_WIDE_INT constop)
{
  rtx tem = simplify_and_const_int_1 (mode, varop, constop);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (AND, GET_MODE (varop), varop,
                             gen_int_mode (constop, mode));
  if (GET_MODE (x) != mode)
    x = gen_lowpart (mode, x);
  return x;
}

/* Given a REG, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   a shift, AND, or zero_extract, we can do better.  */

static rtx
reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
                              const_rtx known_x ATTRIBUTE_UNUSED,
                              enum machine_mode known_mode ATTRIBUTE_UNUSED,
                              unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
                              unsigned HOST_WIDE_INT *nonzero)
{
  rtx tem;
  reg_stat_type *rsp;

  /* If X is a register whose nonzero bits value is current, use it.
     Otherwise, if X is a register whose value we can find, use that
     value.  Otherwise, use the previously-computed global nonzero bits
     for this register.  */

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->last_set_value != 0
      && (rsp->last_set_mode == mode
          || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
              && GET_MODE_CLASS (mode) == MODE_INT))
      && ((rsp->last_set_label >= label_tick_ebb_start
           && rsp->last_set_label < label_tick)
          || (rsp->last_set_label == label_tick
              && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && !REGNO_REG_SET_P
                  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
    {
      *nonzero &= rsp->last_set_nonzero_bits;
      return NULL;
    }

  tem = get_last_value (x);

  if (tem)
    {
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is narrower than MODE and TEM is a non-negative
         constant that would appear negative in the mode of X,
         sign-extend it for use in reg_nonzero_bits because some
         machines (maybe most) will actually do the sign-extension
         and this is the conservative approach.

         ??? For 2.5, try to tighten up the MD files in this regard
         instead of this kludge.  */
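      /* Illustrative example: if X is QImode and TEM is (const_int 255),
         bit 7 is set, so TEM is rewritten as (const_int -1), the properly
         sign-extended form of the same eight bits.  */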

      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
          && CONST_INT_P (tem)
          && INTVAL (tem) > 0
          && 0 != (INTVAL (tem)
                   & ((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        tem = GEN_INT (INTVAL (tem)
                       | ((HOST_WIDE_INT) (-1)
                          << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
      return tem;
    }
  else if (nonzero_sign_valid && rsp->nonzero_bits)
    {
      unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;

      if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
        /* We don't know anything about the upper bits.  */
        mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
      *nonzero &= mask;
    }

  return NULL;
}

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static rtx
reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
                                     const_rtx known_x ATTRIBUTE_UNUSED,
                                     enum machine_mode known_mode
                                     ATTRIBUTE_UNUSED,
                                     unsigned int known_ret ATTRIBUTE_UNUSED,
                                     unsigned int *result)
{
  rtx tem;
  reg_stat_type *rsp;

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->last_set_value != 0
      && rsp->last_set_mode == mode
      && ((rsp->last_set_label >= label_tick_ebb_start
           && rsp->last_set_label < label_tick)
          || (rsp->last_set_label == label_tick
              && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
          || (REGNO (x) >= FIRST_PSEUDO_REGISTER
              && REG_N_SETS (REGNO (x)) == 1
              && !REGNO_REG_SET_P
                  (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
    {
      *result = rsp->last_set_sign_bit_copies;
      return NULL;
    }

  tem = get_last_value (x);
  if (tem != 0)
    return tem;

  if (nonzero_sign_valid && rsp->sign_bit_copies != 0
      && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
    *result = rsp->sign_bit_copies;

  return NULL;
}

/* Return the number of "extended" bits there are in X, when interpreted
   as a quantity in MODE whose signedness is indicated by UNSIGNEDP.  For
   unsigned quantities, this is the number of high-order zero bits.
   For signed quantities, this is the number of copies of the sign bit
   minus 1.  In both cases, this function returns the number of "spare"
   bits.  For example, if two quantities for which this function returns
   at least 1 are added, the addition is known not to overflow.

   This function will always return 0 unless called during combine, which
   implies that it must be called from a define_split.  */
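/* For instance (illustrative): in SImode, if nonzero_bits reports that only
   the low eight bits of X can be set, an unsigned X has 32 - 1 - 7 = 24
   spare bits.  */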

unsigned int
extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
{
  if (nonzero_sign_valid == 0)
    return 0;

  return (unsignedp
          ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
             ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
                               - floor_log2 (nonzero_bits (x, mode)))
             : 0)
          : num_sign_bit_copies (x, mode) - 1);
}

/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 is UNKNOWN, it means no operation is required.  Only
   NEG, PLUS, IOR, XOR, and AND are supported.  We may set *POP0 to SET if
   the proper result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */
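/* Worked example (illustrative): merging an inner XOR with constant C into
   an outer IOR with the same constant leaves *POP0 as IOR unchanged, since
   (a ^ b) | b == a | b.  */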

static int
merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0,
                 enum rtx_code op1, HOST_WIDE_INT const1,
                 enum machine_mode mode, int *pcomp_p)
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is UNKNOWN, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == UNKNOWN || op0 == SET)
    return 1;

  else if (op0 == UNKNOWN)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      switch (op0)
        {
        case AND:
          const0 &= const1;
          break;
        case IOR:
          const0 |= const1;
          break;
        case XOR:
          const0 ^= const1;
          break;
        case PLUS:
          const0 += const1;
          break;
        case NEG:
          op0 = UNKNOWN;
          break;
        default:
          break;
        }
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    switch (op0)
      {
      case IOR:
        if (op1 == AND)
          /* (a & b) | b == b */
          op0 = SET;
        else /* op1 == XOR */
          /* (a ^ b) | b == a | b */
          {;}
        break;

      case XOR:
        if (op1 == AND)
          /* (a & b) ^ b == (~a) & b */
          op0 = AND, *pcomp_p = 1;
        else /* op1 == IOR */
          /* (a | b) ^ b == a & ~b */
          op0 = AND, const0 = ~const0;
        break;

      case AND:
        if (op1 == IOR)
          /* (a | b) & b == b */
          op0 = SET;
        else /* op1 == XOR */
          /* ((a ^ b) & b) == (~a) & b */
          *pcomp_p = 1;
        break;
      default:
        break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = UNKNOWN;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
           && op0 == AND)
    op0 = UNKNOWN;

  *pop0 = op0;

  /* ??? Slightly redundant with the above mask, but not entirely.
     Moving this above means we'd have to sign-extend the mode mask
     for the final test.  */
  if (op0 != UNKNOWN && op0 != NEG)
    *pconst0 = trunc_int_for_mode (const0, mode);

  return 1;
}
9323
 
9324
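/* Illustrative sketch, not part of combine.c: the identities in the switch
   above can be checked with concrete bit patterns.  The values of A and B
   here are arbitrary; HOST_WIDE_INT is stood in for by plain `long'.  */
#if 0
#include <assert.h>

int
main (void)
{
  long a = 0x5a;	/* the variable operand */
  long b = 0x0f;	/* the shared constant */

  assert (((a & b) | b) == b);		/* IOR of AND collapses to SET */
  assert (((a ^ b) | b) == (a | b));	/* IOR of XOR keeps the IOR */
  assert (((a & b) ^ b) == (~a & b));	/* XOR of AND complements A */
  assert (((a | b) ^ b) == (a & ~b));	/* XOR of IOR complements the mask */
  assert (((a | b) & b) == b);		/* AND of IOR collapses to SET */
  assert (((a ^ b) & b) == (~a & b));	/* AND of XOR complements A */
  return 0;
}
#endif
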
/* A helper to simplify_shift_const_1 to determine the mode we can perform
   the shift in.  The original shift operation CODE is performed on OP in
   ORIG_MODE.  Return the wider mode MODE if we can perform the operation
   in that mode.  Return ORIG_MODE otherwise.  We can also assume that the
   result of the shift is subject to operation OUTER_CODE with operand
   OUTER_CONST.  */

static enum machine_mode
try_widen_shift_mode (enum rtx_code code, rtx op, int count,
                      enum machine_mode orig_mode, enum machine_mode mode,
                      enum rtx_code outer_code, HOST_WIDE_INT outer_const)
{
  if (orig_mode == mode)
    return mode;
  gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode));

  /* In general we cannot perform the operation in a wider mode for right
     shifts and rotates.  */
  switch (code)
    {
    case ASHIFTRT:
      /* We can still widen if the bits brought in from the left are identical
         to the sign bit of ORIG_MODE.  */
      if (num_sign_bit_copies (op, mode)
          > (unsigned) (GET_MODE_BITSIZE (mode)
                        - GET_MODE_BITSIZE (orig_mode)))
        return mode;
      return orig_mode;

    case LSHIFTRT:
      /* Similarly here but with zero bits.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
        return mode;

      /* We can also widen if the bits brought in will be masked off.  This
         operation is performed in ORIG_MODE.  */
      if (outer_code == AND)
        {
          int care_bits = low_bitmask_len (orig_mode, outer_const);

          if (care_bits >= 0
              && GET_MODE_BITSIZE (orig_mode) - care_bits >= count)
            return mode;
        }
      /* fall through */

    case ROTATE:
      return orig_mode;

    case ROTATERT:
      gcc_unreachable ();

    default:
      return mode;
    }
}

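/* Illustrative sketch, not part of combine.c: why a logical right shift may
   be widened when the extra high bits are known zero, but not in general.
   Assumes <assert.h> and <stdint.h>.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint8_t narrow = 0x2c;	/* all bits above bit 7 are zero */

  /* Zero high bits: shifting in QImode and in SImode agree.  */
  assert ((uint8_t) (narrow >> 3) == (uint8_t) ((uint32_t) narrow >> 3));

  /* Nonzero high bits: the wider shift drags extra bits into the result,
     so widening is only legal when they are known zero or masked off.  */
  uint32_t wide = 0x12c;
  assert ((uint8_t) ((uint8_t) wide >> 3) != (uint8_t) (wide >> 3));
  return 0;
}
#endif
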
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  Return NULL_RTX if we cannot
   simplify it.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
                        rtx varop, int orig_count)
{
  enum rtx_code orig_code = code;
  rtx orig_varop = varop;
  int count;
  enum machine_mode mode = result_mode;
  enum machine_mode shift_mode, tmode;
  unsigned int mode_words
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
  /* We form (outer_op (code varop count) (outer_const)).  */
  enum rtx_code outer_op = UNKNOWN;
  HOST_WIDE_INT outer_const = 0;
  int complement_p = 0;
  rtx new_rtx, x;

  /* Make sure to truncate the "natural" shift on the way in.  We don't
     want to do this inside the loop as it makes it more difficult to
     combine shifts.  */
  if (SHIFT_COUNT_TRUNCATED)
    orig_count &= GET_MODE_BITSIZE (mode) - 1;

  /* If we were given an invalid count, don't do anything except exactly
     what was requested.  */

  if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
    return NULL_RTX;

  count = orig_count;

  /* Unless one of the branches of the `if' in this loop does a `continue',
     we will `break' the loop after the `if'.  */

  while (count != 0)
    {
      /* If we have an operand of (clobber (const_int 0)), fail.  */
      if (GET_CODE (varop) == CLOBBER)
        return NULL_RTX;

      /* Convert ROTATERT to ROTATE.  */
      if (code == ROTATERT)
        {
          unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
          code = ROTATE;
          if (VECTOR_MODE_P (result_mode))
            count = bitsize / GET_MODE_NUNITS (result_mode) - count;
          else
            count = bitsize - count;
        }

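      /* Illustrative sketch, not part of combine.c: for a scalar mode,
         rotating right by COUNT equals rotating left by BITSIZE - COUNT,
         which is what the conversion above relies on.  Assumes <assert.h>
         and <stdint.h> were available.  */
#if 0
      uint32_t r = 0x00000081u;
      /* ror (r, 4): the low 4 bits wrap around to the top.  */
      assert (((r >> 4) | (r << 28)) == 0x10000008u);
      /* rol (r, 32 - 4) produces the same value.  */
      assert (((r << 28) | (r >> 4)) == 0x10000008u);
#endif
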
      shift_mode = try_widen_shift_mode (code, varop, count, result_mode,
                                         mode, outer_op, outer_const);

      /* Handle cases where the count is greater than the size of the mode
         minus 1.  For ASHIFTRT, use the size minus one as the count (this can
         occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
         take the count modulo the size.  For other shifts, the result is
         zero.

         Since these shifts are being produced by the compiler by combining
         multiple operations, each of which is defined, we know what the
         result is supposed to be.  */

      if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
        {
          if (code == ASHIFTRT)
            count = GET_MODE_BITSIZE (shift_mode) - 1;
          else if (code == ROTATE || code == ROTATERT)
            count %= GET_MODE_BITSIZE (shift_mode);
          else
            {
              /* We can't simply return zero because there may be an
                 outer op.  */
              varop = const0_rtx;
              count = 0;
              break;
            }
        }

      /* If we discovered we had to complement VAROP, leave.  Making a NOT
         here would cause an infinite loop.  */
      if (complement_p)
        break;

      /* An arithmetic right shift of a quantity known to be -1 or 0
         is a no-op.  */
      if (code == ASHIFTRT
          && (num_sign_bit_copies (varop, shift_mode)
              == GET_MODE_BITSIZE (shift_mode)))
        {
          count = 0;
          break;
        }

      /* If we are doing an arithmetic right shift and discarding all but
         the sign bit copies, this is equivalent to doing a shift by the
         bitsize minus one.  Convert it into that shift because it will often
         allow other simplifications.  */

      if (code == ASHIFTRT
          && (count + num_sign_bit_copies (varop, shift_mode)
              >= GET_MODE_BITSIZE (shift_mode)))
        count = GET_MODE_BITSIZE (shift_mode) - 1;

      /* We simplify the tests below and elsewhere by converting
         ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
         `make_compound_operation' will convert it to an ASHIFTRT for
         those machines (such as VAX) that don't have an LSHIFTRT.  */
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
          && code == ASHIFTRT
          && ((nonzero_bits (varop, shift_mode)
               & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
              == 0))
        code = LSHIFTRT;

      if (((code == LSHIFTRT
            && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
            && !(nonzero_bits (varop, shift_mode) >> count))
           || (code == ASHIFT
               && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
               && !((nonzero_bits (varop, shift_mode) << count)
                    & GET_MODE_MASK (shift_mode))))
          && !side_effects_p (varop))
        varop = const0_rtx;

      switch (GET_CODE (varop))
        {
        case SIGN_EXTEND:
        case ZERO_EXTEND:
        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          new_rtx = expand_compound_operation (varop);
          if (new_rtx != varop)
            {
              varop = new_rtx;
              continue;
            }
          break;

        case MEM:
          /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
             minus the width of a smaller mode, we can do this with a
             SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
          if ((code == ASHIFTRT || code == LSHIFTRT)
              && ! mode_dependent_address_p (XEXP (varop, 0))
              && ! MEM_VOLATILE_P (varop)
              && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
                                         MODE_INT, 1)) != BLKmode)
            {
              new_rtx = adjust_address_nv (varop, tmode,
                                           BYTES_BIG_ENDIAN ? 0
                                           : count / BITS_PER_UNIT);

              varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
                                     : ZERO_EXTEND, mode, new_rtx);
              count = 0;
              continue;
            }
          break;

        case SUBREG:
          /* If VAROP is a SUBREG, strip it as long as the inner operand has
             the same number of words as what we've seen so far.  Then store
             the widest mode in MODE.  */
          if (subreg_lowpart_p (varop)
              && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
                  > GET_MODE_SIZE (GET_MODE (varop)))
              && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
                                  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                 == mode_words
              && GET_MODE_CLASS (GET_MODE (varop)) == MODE_INT
              && GET_MODE_CLASS (GET_MODE (SUBREG_REG (varop))) == MODE_INT)
            {
              varop = SUBREG_REG (varop);
              if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
                mode = GET_MODE (varop);
              continue;
            }
          break;

        case MULT:
          /* Some machines use MULT instead of ASHIFT because MULT
             is cheaper.  But it is still better on those machines to
             merge two shifts into one.  */
          if (CONST_INT_P (XEXP (varop, 1))
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
            {
              varop
                = simplify_gen_binary (ASHIFT, GET_MODE (varop),
                                       XEXP (varop, 0),
                                       GEN_INT (exact_log2 (
                                                INTVAL (XEXP (varop, 1)))));
              continue;
            }
          break;

        case UDIV:
          /* Similar, for when divides are cheaper.  */
          if (CONST_INT_P (XEXP (varop, 1))
              && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
            {
              varop
                = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
                                       XEXP (varop, 0),
                                       GEN_INT (exact_log2 (
                                                INTVAL (XEXP (varop, 1)))));
              continue;
            }
          break;
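
        /* Illustrative sketch, not part of combine.c: the two rewrites above
           rely on multiplication and unsigned division by 2**n being exactly
           a shift by n.  Assumes <assert.h> and <stdint.h>.  */
#if 0
        uint32_t v = 1234567u;
        assert (v * 8 == v << 3);	/* MULT by 2**3 is ASHIFT by 3 */
        assert (v / 8 == v >> 3);	/* UDIV by 2**3 is LSHIFTRT by 3 */
#endif
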
        case ASHIFTRT:
          /* If we are extracting just the sign bit of an arithmetic
             right shift, that shift is not needed.  However, the sign
             bit of a wider mode may be different from what would be
             interpreted as the sign bit in a narrower mode, so, if
             the result is narrower, don't discard the shift.  */
          if (code == LSHIFTRT
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
              && (GET_MODE_BITSIZE (result_mode)
                  >= GET_MODE_BITSIZE (GET_MODE (varop))))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* ... fall through ...  */

        case LSHIFTRT:
        case ASHIFT:
        case ROTATE:
          /* Here we have two nested shifts.  The result is usually the
             AND of a new shift with a mask.  We compute the result below.  */
          if (CONST_INT_P (XEXP (varop, 1))
              && INTVAL (XEXP (varop, 1)) >= 0
              && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && !VECTOR_MODE_P (result_mode))
            {
              enum rtx_code first_code = GET_CODE (varop);
              unsigned int first_count = INTVAL (XEXP (varop, 1));
              unsigned HOST_WIDE_INT mask;
              rtx mask_rtx;

              /* We have one common special case.  We can't do any merging if
                 the inner code is an ASHIFTRT of a smaller mode.  However, if
                 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
                 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
                 we can convert it to
                 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
                 This simplifies certain SIGN_EXTEND operations.  */
              if (code == ASHIFT && first_code == ASHIFTRT
                  && count == (GET_MODE_BITSIZE (result_mode)
                               - GET_MODE_BITSIZE (GET_MODE (varop))))
                {
                  /* C3 has the low-order C1 bits zero.  */

                  mask = (GET_MODE_MASK (mode)
                          & ~(((HOST_WIDE_INT) 1 << first_count) - 1));

                  varop = simplify_and_const_int (NULL_RTX, result_mode,
                                                  XEXP (varop, 0), mask);
                  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
                                                varop, count);
                  count = first_count;
                  code = ASHIFTRT;
                  continue;
                }

              /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
                 than C1 high-order bits equal to the sign bit, we can convert
                 this to either an ASHIFT or an ASHIFTRT depending on the
                 two counts.

                 We cannot do this if VAROP's mode is not SHIFT_MODE.  */

              if (code == ASHIFTRT && first_code == ASHIFT
                  && GET_MODE (varop) == shift_mode
                  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
                      > first_count))
                {
                  varop = XEXP (varop, 0);
                  count -= first_count;
                  if (count < 0)
                    {
                      count = -count;
                      code = ASHIFT;
                    }

                  continue;
                }

              /* There are some cases we can't do.  If CODE is ASHIFTRT,
                 we can only do this if FIRST_CODE is also ASHIFTRT.

                 We can't do the case when CODE is ROTATE and FIRST_CODE is
                 ASHIFTRT.

                 If the mode of this shift is not the mode of the outer shift,
                 we can't do this if either shift is a right shift or ROTATE.

                 Finally, we can't do any of these if the mode is too wide
                 unless the codes are the same.

                 Handle the case where the shift codes are the same
                 first.  */

              if (code == first_code)
                {
                  if (GET_MODE (varop) != result_mode
                      && (code == ASHIFTRT || code == LSHIFTRT
                          || code == ROTATE))
                    break;

                  count += first_count;
                  varop = XEXP (varop, 0);
                  continue;
                }

              if (code == ASHIFTRT
                  || (code == ROTATE && first_code == ASHIFTRT)
                  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
                  || (GET_MODE (varop) != result_mode
                      && (first_code == ASHIFTRT || first_code == LSHIFTRT
                          || first_code == ROTATE
                          || code == ROTATE)))
                break;

              /* To compute the mask to apply after the shift, shift the
                 nonzero bits of the inner shift the same way the
                 outer shift will.  */

              mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));

              mask_rtx
                = simplify_const_binary_operation (code, result_mode, mask_rtx,
                                                   GEN_INT (count));

              /* Give up if we can't compute an outer operation to use.  */
              if (mask_rtx == 0
                  || !CONST_INT_P (mask_rtx)
                  || ! merge_outer_ops (&outer_op, &outer_const, AND,
                                        INTVAL (mask_rtx),
                                        result_mode, &complement_p))
                break;

              /* If the shifts are in the same direction, we add the
                 counts.  Otherwise, we subtract them.  */
              if ((code == ASHIFTRT || code == LSHIFTRT)
                  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
                count += first_count;
              else
                count -= first_count;

              /* If COUNT is positive, the new shift is usually CODE,
                 except for the two exceptions below, in which case it is
                 FIRST_CODE.  If the count is negative, FIRST_CODE should
                 always be used.  */
              if (count > 0
                  && ((first_code == ROTATE && code == ASHIFT)
                      || (first_code == ASHIFTRT && code == LSHIFTRT)))
                code = first_code;
              else if (count < 0)
                code = first_code, count = -count;

              varop = XEXP (varop, 0);
              continue;
            }

          /* If we have (A << B << C) for any shift, we can convert this to
             (A << C << B).  This wins if A is a constant.  Only try this if
             B is not a constant.  */

          else if (GET_CODE (varop) == code
                   && CONST_INT_P (XEXP (varop, 0))
                   && !CONST_INT_P (XEXP (varop, 1)))
            {
              rtx new_rtx = simplify_const_binary_operation (code, mode,
                                                             XEXP (varop, 0),
                                                             GEN_INT (count));
              varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
              count = 0;
              continue;
            }
          break;
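
        /* Illustrative sketch, not part of combine.c: merging two shifts in
           the same direction adds the counts; opposite directions leave one
           shift plus the AND mask computed above.  Assumes <assert.h> and
           <stdint.h>.  */
#if 0
        uint32_t s = 0xdeadbeefu;
        /* Same direction: (s >> 2) >> 3 == s >> 5.  */
        assert (((s >> 2) >> 3) == (s >> 5));
        /* Opposite directions: (s >> 2) << 2 clears the low bits; it is a
           shift count of zero plus an outer AND with ~3.  */
        assert (((s >> 2) << 2) == (s & ~3u));
#endif
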
        case NOT:
          if (VECTOR_MODE_P (mode))
            break;

          /* Make this fit the case below.  */
          varop = gen_rtx_XOR (mode, XEXP (varop, 0),
                               GEN_INT (GET_MODE_MASK (mode)));
          continue;

        case IOR:
        case AND:
        case XOR:
          /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have an (le X 0) operation.  If we have an arithmetic shift
             and STORE_FLAG_VALUE is 1 or we have a logical shift with
             STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */

          if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
              && XEXP (XEXP (varop, 0), 1) == constm1_rtx
              && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && (code == LSHIFTRT || code == ASHIFTRT)
              && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
              varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
                                  const0_rtx);

              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (GET_MODE (varop), varop);

              continue;
            }
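
          /* Illustrative sketch, not part of combine.c: the (le X 0) pattern
             above, written out for a 32-bit value.  Assumes <assert.h> and
             <stdint.h>.  */
#if 0
          int32_t le_v;
          for (le_v = -2; le_v <= 2; le_v++)
            /* ((le_v - 1) | le_v) has its sign bit set exactly when
               le_v <= 0, so the logical shift by 31 yields the flag.  */
            assert ((int) ((uint32_t) ((le_v - 1) | le_v) >> 31)
                    == (le_v <= 0));
#endif
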
          /* If we have (shift (logical)), move the logical to the outside
             to allow it to possibly combine with another logical and the
             shift to combine with another shift.  This also canonicalizes to
             what a ZERO_EXTRACT looks like.  Also, some machines have
             (and (shift)) insns.  */

          if (CONST_INT_P (XEXP (varop, 1))
              /* We can't do this if we have (ashiftrt (xor)) and the
                 constant has its sign bit set in shift_mode.  */
              && !(code == ASHIFTRT && GET_CODE (varop) == XOR
                   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
                                              shift_mode))
              && (new_rtx = simplify_const_binary_operation (code, result_mode,
                                                             XEXP (varop, 1),
                                                             GEN_INT (count))) != 0
              && CONST_INT_P (new_rtx)
              && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
                                  INTVAL (new_rtx), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* If we can't do that, try to simplify the shift in each arm of the
             logical expression, make a new logical expression, and apply
             the inverse distributive law.  This also can't be done
             for some (ashiftrt (xor)).  */
          if (CONST_INT_P (XEXP (varop, 1))
              && !(code == ASHIFTRT && GET_CODE (varop) == XOR
                   && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
                                              shift_mode)))
            {
              rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
                                              XEXP (varop, 0), count);
              rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
                                              XEXP (varop, 1), count);

              varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
                                           lhs, rhs);
              varop = apply_distributive_law (varop);

              count = 0;
              continue;
            }
          break;

        case EQ:
          /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
             says that the sign bit can be tested, FOO has mode MODE, C is
             GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
             that may be nonzero.  */
          if (code == LSHIFTRT
              && XEXP (varop, 1) == const0_rtx
              && GET_MODE (XEXP (varop, 0)) == result_mode
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
              && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
              && STORE_FLAG_VALUE == -1
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  (HOST_WIDE_INT) 1, result_mode,
                                  &complement_p))
            {
              varop = XEXP (varop, 0);
              count = 0;
              continue;
            }
          break;
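
        /* Illustrative sketch, not part of combine.c: on a target with
           STORE_FLAG_VALUE == -1, (eq FOO 0) is -1 or 0, so a logical shift
           by the mode width minus 1 yields FOO's low bit inverted, i.e.
           (xor FOO 1) when only that bit can be nonzero.  Assumes
           <assert.h> and <stdint.h>.  */
#if 0
        int32_t foo;
        for (foo = 0; foo <= 1; foo++)
          {
            int32_t flag = (foo == 0) ? -1 : 0;	/* STORE_FLAG_VALUE == -1 */
            assert ((int32_t) ((uint32_t) flag >> 31) == (foo ^ 1));
          }
#endif
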
        case NEG:
          /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
             than the number of bits in the mode is equivalent to A.  */
          if (code == LSHIFTRT
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
            {
              varop = XEXP (varop, 0);
              count = 0;
              continue;
            }

          /* NEG commutes with ASHIFT since it is multiplication.  Move the
             NEG outside to allow shifts to combine.  */
          if (code == ASHIFT
              && merge_outer_ops (&outer_op, &outer_const, NEG,
                                  (HOST_WIDE_INT) 0, result_mode,
                                  &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }
          break;

        case PLUS:
          /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
             is one less than the number of bits in the mode is
             equivalent to (xor A 1).  */
          if (code == LSHIFTRT
              && count == (GET_MODE_BITSIZE (result_mode) - 1)
              && XEXP (varop, 1) == constm1_rtx
              && nonzero_bits (XEXP (varop, 0), result_mode) == 1
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  (HOST_WIDE_INT) 1, result_mode,
                                  &complement_p))
            {
              count = 0;
              varop = XEXP (varop, 0);
              continue;
            }

          /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
             that might be nonzero in BAR are those being shifted out and those
             bits are known zero in FOO, we can replace the PLUS with FOO.
             Similarly in the other operand order.  This code occurs when
             we are computing the size of a variable-size array.  */

          if ((code == ASHIFTRT || code == LSHIFTRT)
              && count < HOST_BITS_PER_WIDE_INT
              && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
              && (nonzero_bits (XEXP (varop, 1), result_mode)
                  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
            {
              varop = XEXP (varop, 0);
              continue;
            }
          else if ((code == ASHIFTRT || code == LSHIFTRT)
                   && count < HOST_BITS_PER_WIDE_INT
                   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
                            >> count)
                   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
                            & nonzero_bits (XEXP (varop, 1), result_mode)))
            {
              varop = XEXP (varop, 1);
              continue;
            }

          /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
          if (code == ASHIFT
              && CONST_INT_P (XEXP (varop, 1))
              && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
                                                             XEXP (varop, 1),
                                                             GEN_INT (count))) != 0
              && CONST_INT_P (new_rtx)
              && merge_outer_ops (&outer_op, &outer_const, PLUS,
                                  INTVAL (new_rtx), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          /* Check for 'PLUS signbit', which is the canonical form of 'XOR
             signbit', and attempt to change the PLUS to an XOR and move it to
             the outer operation as is done above in the AND/IOR/XOR case
             leg for shift(logical).  See details in logical handling above
             for reasoning in doing so.  */
          if (code == LSHIFTRT
              && CONST_INT_P (XEXP (varop, 1))
              && mode_signbit_p (result_mode, XEXP (varop, 1))
              && (new_rtx = simplify_const_binary_operation (code, result_mode,
                                                             XEXP (varop, 1),
                                                             GEN_INT (count))) != 0
              && CONST_INT_P (new_rtx)
              && merge_outer_ops (&outer_op, &outer_const, XOR,
                                  INTVAL (new_rtx), result_mode, &complement_p))
            {
              varop = XEXP (varop, 0);
              continue;
            }

          break;
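
        /* Illustrative sketch, not part of combine.c: the
           (lshiftrt (plus A -1) C) rewrite above, for A known to be 0 or 1
           in a 32-bit mode.  Assumes <assert.h> and <stdint.h>.  */
#if 0
        uint32_t a;
        for (a = 0; a <= 1; a++)
          assert (((a - 1u) >> 31) == (a ^ 1u));
#endif
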
        case MINUS:
          /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
             with C the size of VAROP - 1 and the shift is logical if
             STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
             we have a (gt X 0) operation.  If the shift is arithmetic with
             STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
             we have a (neg (gt X 0)) operation.  */

          if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
              && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
              && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
              && (code == LSHIFTRT || code == ASHIFTRT)
              && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
              && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
              && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
            {
              count = 0;
              varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
                                  const0_rtx);

              if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
                varop = gen_rtx_NEG (GET_MODE (varop), varop);

              continue;
            }
          break;
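
        /* Illustrative sketch, not part of combine.c: the (gt X 0) pattern
           above for 32 bits: ((x >> 31) - x) has its sign bit set exactly
           when x > 0, assuming an arithmetic right shift of negative values
           as on the targets combine models.  Assumes <assert.h> and
           <stdint.h>.  */
#if 0
        int32_t gt_x;
        for (gt_x = -2; gt_x <= 2; gt_x++)
          assert ((int) ((uint32_t) ((gt_x >> 31) - gt_x) >> 31)
                  == (gt_x > 0));
#endif
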
        case TRUNCATE:
          /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
             if the truncate does not affect the value.  */
          if (code == LSHIFTRT
              && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
              && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
              && (INTVAL (XEXP (XEXP (varop, 0), 1))
                  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
                      - GET_MODE_BITSIZE (GET_MODE (varop)))))
            {
              rtx varop_inner = XEXP (varop, 0);

              varop_inner
                = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
                                    XEXP (varop_inner, 0),
                                    GEN_INT
                                    (count + INTVAL (XEXP (varop_inner, 1))));
              varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
              count = 0;
              continue;
            }
          break;

        default:
          break;
        }

      break;
    }

  shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode,
                                     outer_op, outer_const);

  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.  */

  if (outer_op == UNKNOWN
      && orig_code == code && orig_count == count
      && varop == orig_varop
      && shift_mode == GET_MODE (varop))
    return NULL_RTX;

  /* Make a SUBREG if necessary.  If we can't make it, fail.  */
  varop = gen_lowpart (shift_mode, varop);
  if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
    return NULL_RTX;

  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != UNKNOWN)
    x = simplify_shift_const_1 (code, shift_mode, varop, count);
  else
    x = NULL_RTX;

  if (x == NULL_RTX)
    x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));

  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
                                GET_MODE_MASK (result_mode) >> orig_count);

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart_or_truncate (result_mode, x);

  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = simplify_gen_unary (NOT, result_mode, x, result_mode);

  if (outer_op != UNKNOWN)
    {
      if (GET_RTX_CLASS (outer_op) != RTX_UNARY
          && GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
        outer_const = trunc_int_for_mode (outer_const, result_mode);

      if (outer_op == AND)
        x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
        {
          /* This means that we have determined that the result is
             equivalent to a constant.  This should be rare.  */
          if (!side_effects_p (x))
            x = GEN_INT (outer_const);
        }
      else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
        x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
      else
        x = simplify_gen_binary (outer_op, result_mode, x,
                                 GEN_INT (outer_const));
    }

  return x;
}

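/* Illustrative sketch, not part of combine.c: when a QImode logical right
   shift is carried out in a wider mode, the extra high bits must be masked
   off afterwards, which is what the AND with
   GET_MODE_MASK (result_mode) >> orig_count above does.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t wide = 0x1a4;	/* only the low byte is the QImode value */
  unsigned count = 3;

  /* Shift in the wide mode, then keep only the bits a QImode shift could
     have produced: mask = GET_MODE_MASK (QImode) >> count.  */
  uint32_t masked = (wide >> count) & (0xffu >> count);
  assert (masked == (uint32_t) ((uint8_t) wide >> count));
  return 0;
}
#endif
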
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  If we cannot simplify it,
   return X or, if it is NULL, synthesize the expression with
   simplify_gen_binary.  Otherwise, return a simplified value.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
                      rtx varop, int count)
{
  rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
  if (tem)
    return tem;

  if (!x)
    x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
  if (GET_MODE (x) != result_mode)
    x = gen_lowpart (result_mode, x);
  return x;
}

/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
{
  rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;
  rtx old_notes, old_pat;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
          && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
        return -1;

  old_pat = PATTERN (insn);
  old_notes = REG_NOTES (insn);
  PATTERN (insn) = pat;
  REG_NOTES (insn) = 0;

  insn_code_number = recog (pat, insn, &num_clobbers_to_add);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn_code_number < 0)
        fputs ("Failed to match this instruction:\n", dump_file);
      else
        fputs ("Successfully matched this instruction:\n", dump_file);
      print_rtl_single (dump_file, pat);
    }

  /* If the pattern wasn't recognized, there is the possibility that we
     previously had an insn that clobbered some register as a side effect,
     but the combined insn doesn't need to do that.  So try once more
     without the clobbers unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
        if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
          {
            if (i != pos)
              SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
            pos++;
          }

      SUBST_INT (XVECLEN (pat, 0), pos);

      if (pos == 1)
        pat = XVECEXP (pat, 0, 0);

      PATTERN (insn) = pat;
      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (insn_code_number < 0)
            fputs ("Failed to match this instruction:\n", dump_file);
          else
            fputs ("Successfully matched this instruction:\n", dump_file);
          print_rtl_single (dump_file, pat);
        }
    }
  PATTERN (insn) = old_pat;
  REG_NOTES (insn) = old_notes;

  /* Recognize all noop sets; these will be killed by a followup pass.  */
  if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
    insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
                                     rtvec_alloc (GET_CODE (pat) == PARALLEL
                                                  ? (XVECLEN (pat, 0)
                                                     + num_clobbers_to_add)
                                                  : num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
        for (i = 0; i < XVECLEN (pat, 0); i++)
          XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
        XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
           i < XVECLEN (newpat, 0); i++)
        {
          if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
              && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
            return -1;
          if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
            {
              gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
              notes = alloc_reg_note (REG_UNUSED,
                                      XEXP (XVECEXP (newpat, 0, i), 0), notes);
            }
        }
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}

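/* Illustrative sketch, not part of combine.c: the retry above compacts the
   PARALLEL in place, keeping non-CLOBBER elements in their original order.
   The same stable-compaction idiom on a plain array:  */
#if 0
#include <assert.h>

int
main (void)
{
  int vec[5] = { 7, -1, 8, -1, 9 };	/* -1 plays the role of a CLOBBER */
  int pos, i;

  for (pos = 0, i = 0; i < 5; i++)
    if (vec[i] != -1)
      {
        if (i != pos)
          vec[pos] = vec[i];
        pos++;
      }

  assert (pos == 3 && vec[0] == 7 && vec[1] == 8 && vec[2] == 9);
  return 0;
}
#endif
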
/* Like gen_lowpart_general but for use by combine.  In combine it
   is not possible to create any new pseudoregs.  However, it is
   safe to create invalid memory addresses, because combine will
   try to recognize them and all they will do is make the combine
   attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

static rtx
gen_lowpart_for_combine (enum machine_mode omode, rtx x)
{
  enum machine_mode imode = GET_MODE (x);
  unsigned int osize = GET_MODE_SIZE (omode);
  unsigned int isize = GET_MODE_SIZE (imode);
  rtx result;

  if (omode == imode)
    return x;

  /* Return identity if this is a CONST or symbolic reference.  */
  if (omode == Pmode
      && (GET_CODE (x) == CONST
          || GET_CODE (x) == SYMBOL_REF
          || GET_CODE (x) == LABEL_REF))
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */
  if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
      && ! ((imode == VOIDmode
             && (CONST_INT_P (x)
                 || GET_CODE (x) == CONST_DOUBLE))
            || isize == osize))
    goto fail;

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
    {
      x = SUBREG_REG (x);

      /* For use in case we fall down into the address adjustments
         further below, we need to adjust IMODE and ISIZE, the known
         mode and size of X, since we just adjusted X.  */
      imode = GET_MODE (x);

      if (imode == omode)
        return x;

      isize = GET_MODE_SIZE (imode);
    }

  result = gen_lowpart_common (omode, x);

  if (result)
    return result;

  if (MEM_P (x))
    {
      int offset = 0;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
         address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
        goto fail;

      /* If we want to refer to something bigger than the original memref,
         generate a paradoxical subreg instead.  That will force a reload
         of the original memref X.  */
      if (isize < osize)
        return gen_rtx_SUBREG (omode, x, 0);

      if (WORDS_BIG_ENDIAN)
        offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);

      /* Adjust the address so that the address-after-the-data is
         unchanged.  */
      if (BYTES_BIG_ENDIAN)
        offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);

      return adjust_address_nv (x, omode, offset);
    }

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (COMPARISON_P (x))
    return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int offset = 0;
      rtx res;

      offset = subreg_lowpart_offset (omode, imode);
      if (imode == VOIDmode)
        {
          imode = int_mode_for_mode (omode);
          x = gen_lowpart_common (imode, x);
          if (x == NULL)
            goto fail;
        }
      res = simplify_gen_subreg (omode, x, imode, offset);
      if (res)
        return res;
    }

 fail:
  return gen_rtx_CLOBBER (omode, const0_rtx);
}

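/* Illustrative sketch, not part of combine.c: the BYTES_BIG_ENDIAN
   adjustment above keeps the address just past the data unchanged.
   Selecting the low byte of a 4-byte value: offset 0 on little-endian,
   3 on big-endian, so in both cases offset + osize lands at the little
   end of the wider object.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  uint32_t word = 0x11223344u;
  unsigned char bytes[4];
  memcpy (bytes, &word, 4);

  /* The low (QImode) byte lives at index 0 or 3 depending on byte order;
     this mirrors the offset computed for adjust_address_nv above.  */
  int big_endian = (bytes[0] == 0x11);
  int offset = big_endian ? 4 - 1 : 0;
  assert (bytes[offset] == 0x44);
  return 0;
}
#endif
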
/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10383
   comparison code that will be tested.
10384
 
10385
   The result is a possibly different comparison code to use.  *POP0 and
10386
   *POP1 may be updated.
10387
 
10388
   It is possible that we might detect that a comparison is either always
10389
   true or always false.  However, we do not perform general constant
10390
   folding in combine, so this knowledge isn't useful.  Such tautologies
10391
   should have been detected earlier.  Hence we ignore all such cases.  */
10392
 
10393
static enum rtx_code
10394
simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10395
{
10396
  rtx op0 = *pop0;
10397
  rtx op1 = *pop1;
10398
  rtx tem, tem1;
10399
  int i;
10400
  enum machine_mode mode, tmode;
10401
 
10402
  /* Try a few ways of applying the same transformation to both operands.  */
10403
  while (1)
10404
    {
10405
#ifndef WORD_REGISTER_OPERATIONS
10406
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
10407
         so check specially.  */
10408
      if (code != GTU && code != GEU && code != LTU && code != LEU
10409
          && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10410
          && GET_CODE (XEXP (op0, 0)) == ASHIFT
10411
          && GET_CODE (XEXP (op1, 0)) == ASHIFT
10412
          && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10413
          && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10414
          && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
10415
              == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
10416
          && CONST_INT_P (XEXP (op0, 1))
10417
          && XEXP (op0, 1) == XEXP (op1, 1)
10418
          && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10419
          && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10420
          && (INTVAL (XEXP (op0, 1))
10421
              == (GET_MODE_BITSIZE (GET_MODE (op0))
10422
                  - (GET_MODE_BITSIZE
10423
                     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10424
        {
10425
          op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10426
          op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10427
        }
10428
#endif
10429
 
10430
      /* If both operands are the same constant shift, see if we can ignore the
10431
         shift.  We can if the shift is a rotate or if the bits shifted out of
10432
         this shift are known to be zero for both inputs and if the type of
10433
         comparison is compatible with the shift.  */
10434
      if (GET_CODE (op0) == GET_CODE (op1)
10435
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10436
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
10437
              || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10438
                  && (code != GT && code != LT && code != GE && code != LE))
10439
              || (GET_CODE (op0) == ASHIFTRT
10440
                  && (code != GTU && code != LTU
10441
                      && code != GEU && code != LEU)))
10442
          && CONST_INT_P (XEXP (op0, 1))
10443
          && INTVAL (XEXP (op0, 1)) >= 0
10444
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10445
          && XEXP (op0, 1) == XEXP (op1, 1))
10446
        {
10447
          enum machine_mode mode = GET_MODE (op0);
10448
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10449
          int shift_count = INTVAL (XEXP (op0, 1));
10450
 
10451
          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10452
            mask &= (mask >> shift_count) << shift_count;
10453
          else if (GET_CODE (op0) == ASHIFT)
10454
            mask = (mask & (mask << shift_count)) >> shift_count;
10455
 
10456
          if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10457
              && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10458
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10459
          else
10460
            break;
10461
        }
10462
 
10463
      /* If both operands are AND's of a paradoxical SUBREG by constant, the
10464
         SUBREGs are of the same mode, and, in both cases, the AND would
10465
         be redundant if the comparison was done in the narrower mode,
10466
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
10467
         and the operand's possibly nonzero bits are 0xffffff01; in that case
10468
         if we only care about QImode, we don't need the AND).  This case
10469
         occurs if the output mode of an scc insn is not SImode and
10470
         STORE_FLAG_VALUE == 1 (e.g., the 386).
10471
 
10472
         Similarly, check for a case where the AND's are ZERO_EXTEND
10473
         operations from some narrower mode even though a SUBREG is not
10474
         present.  */
10475
 
10476
      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
10477
               && CONST_INT_P (XEXP (op0, 1))
10478
               && CONST_INT_P (XEXP (op1, 1)))
10479
        {
10480
          rtx inner_op0 = XEXP (op0, 0);
10481
          rtx inner_op1 = XEXP (op1, 0);
10482
          HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10483
          HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10484
          int changed = 0;
10485
 
10486
          if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10487
              && (GET_MODE_SIZE (GET_MODE (inner_op0))
10488
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10489
              && (GET_MODE (SUBREG_REG (inner_op0))
10490
                  == GET_MODE (SUBREG_REG (inner_op1)))
10491
              && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
10492
                  <= HOST_BITS_PER_WIDE_INT)
10493
              && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
10494
                                             GET_MODE (SUBREG_REG (inner_op0)))))
              && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
                                             GET_MODE (SUBREG_REG (inner_op1))))))
            {
              op0 = SUBREG_REG (inner_op0);
              op1 = SUBREG_REG (inner_op1);

              /* The resulting comparison is always unsigned since we masked
                 off the original sign bit.  */
              code = unsigned_condition (code);

              changed = 1;
            }

          else if (c0 == c1)
            for (tmode = GET_CLASS_NARROWEST_MODE
                 (GET_MODE_CLASS (GET_MODE (op0)));
                 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
              if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
                {
                  op0 = gen_lowpart (tmode, inner_op0);
                  op1 = gen_lowpart (tmode, inner_op1);
                  code = unsigned_condition (code);
                  changed = 1;
                  break;
                }

          if (! changed)
            break;
        }

      /* If both operands are NOT, we can strip off the outer operation
         and adjust the comparison code for swapped operands; similarly for
         NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
               || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
                   && (code == EQ || code == NE)))
        op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
        break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we are only concerned with comparisons against zero,
     but some things may really be comparisons with zero that do not start
     out looking that way.  */

  while (CONST_INT_P (op1))
    {
      enum machine_mode mode = GET_MODE (op0);
      unsigned int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
         CCmode, and the floating-point modes.  An exception is that we
         can handle VOIDmode if OP0 is a COMPARE or a comparison
         operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
          && ! (mode == VOIDmode
                && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
        break;

      /* Get the constant we are comparing against and turn off all bits
         not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode != VOIDmode)
        const_op = trunc_int_for_mode (const_op, mode);
      op1 = GEN_INT (const_op);

      /* If we are comparing against a constant power of two and the value
         being compared can only have that single bit nonzero (e.g., it was
         `and'ed with that bit), we can replace this with a comparison
         with zero.  */
      if (const_op
          && (code == EQ || code == NE || code == GE || code == GEU
              || code == LT || code == LTU)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && exact_log2 (const_op) >= 0
          && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
        {
          code = (code == EQ || code == GE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }
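      /* Illustrative example (not in the original source): if OP0 is
         known to be (and:SI X (const_int 8)), its only possible nonzero
         bit is 8, so (eq OP0 (const_int 8)) is rewritten here as
         (ne OP0 (const_int 0)).  */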

      /* Similarly, if we are comparing a value known to be either -1 or
         0 with -1, we can convert this into a comparison with zero.  */
      if (const_op == -1
          && (code == EQ || code == NE || code == GT || code == LE
              || code == GEU || code == LTU)
          && num_sign_bit_copies (op0, mode) == mode_width)
        {
          code = (code == EQ || code == LE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }
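      /* Illustrative example (not in the original source): when every bit
         of OP0 is a copy of its sign bit, OP0 can only be 0 or -1, so
         (eq OP0 (const_int -1)) becomes (ne OP0 (const_int 0)).  */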

      /* Do some canonicalizations based on the comparison code.  We prefer
         comparisons against zero and then prefer equality comparisons.
         If we can reduce the size of a constant, we will do that too.  */

      switch (code)
        {
        case LT:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LE;
              /* ... fall through to LE case below.  */
            }
          else
            break;

        case LE:
          /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = LT;
            }

          /* If we are doing a <= 0 comparison on a value known to have
             a zero sign bit, we can replace this with == 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = EQ;
          break;

        case GE:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GT;
              /* ... fall through to GT below.  */
            }
          else
            break;

        case GT:
          /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = GE;
            }

          /* If we are doing a > 0 comparison on a value known to have
             a zero sign bit, we can replace this with != 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = NE;
          break;

        case LTU:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LEU;
              /* ... fall through ...  */
            }

          /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
              break;
            }
          else
            break;

        case LEU:
          /* unsigned <= 0 is equivalent to == 0.  */
          if (const_op == 0)
            code = EQ;

          /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
            }
          break;

        case GEU:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 1)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GTU;
              /* ... fall through ...  */
            }

          /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
              break;
            }
          else
            break;

        case GTU:
          /* unsigned > 0 is equivalent to != 0.  */
          if (const_op == 0)
            code = NE;

          /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
            }
          break;

        default:
          break;
        }
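      /* Illustrative examples (not in the original source): in SImode,
         (lt X (const_int 5)) canonicalizes to (le X (const_int 4)), and
         (ltu X (const_int 0x80000000)) becomes (ge X (const_int 0)),
         i.e. a plain sign-bit test.  */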

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
                               || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
         MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        op0 = force_to_mode (op0, mode,
                             ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (mode) - 1)),
                             0);
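      /* Illustrative note (not in the original source): for an SImode
         LT/GE-against-zero test the mask passed above is 0x80000000,
         telling force_to_mode that every bit below the sign bit is
         irrelevant.  */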

      /* Now try cases based on the opcode of OP0.  If none of the cases
         does a "continue", we exit this loop immediately after the
         switch.  */

      switch (GET_CODE (op0))
        {
        case ZERO_EXTRACT:
          /* If we are extracting a single bit from a variable position in
             a constant that has only a single bit set and are comparing it
             with zero, we can convert this into an equality comparison
             between the position and the location of the single bit.  */
          /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
             have already reduced the shift count modulo the word size.  */
          if (!SHIFT_COUNT_TRUNCATED
              && CONST_INT_P (XEXP (op0, 0))
              && XEXP (op0, 1) == const1_rtx
              && equality_comparison_p && const_op == 0
              && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
            {
              if (BITS_BIG_ENDIAN)
                {
                  enum machine_mode new_mode
                    = mode_for_extraction (EP_extzv, 1);
                  if (new_mode == MAX_MACHINE_MODE)
                    i = BITS_PER_WORD - 1 - i;
                  else
                    {
                      mode = new_mode;
                      i = (GET_MODE_BITSIZE (mode) - 1 - i);
                    }
                }

              op0 = XEXP (op0, 2);
              op1 = GEN_INT (i);
              const_op = i;

              /* Result is nonzero iff shift count is equal to I.  */
              code = reverse_condition (code);
              continue;
            }
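          /* Illustrative example (not in the original source): with
             little-endian bit numbering, (eq (zero_extract (const_int 4)
             (const_int 1) POS) 0) tests whether bit POS of the value 4 is
             clear, and 4 == 1 << 2, so it becomes (ne POS (const_int 2)).  */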

          /* ... fall through ...  */

        case SIGN_EXTRACT:
          tem = expand_compound_operation (op0);
          if (tem != op0)
            {
              op0 = tem;
              continue;
            }
          break;

        case NOT:
          /* If testing for equality, we can take the NOT of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If just looking at the sign bit, reverse the sense of the
             comparison.  */
          if (sign_bit_comparison_p)
            {
              op0 = XEXP (op0, 0);
              code = (code == GE ? LT : GE);
              continue;
            }
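          /* Illustrative examples (not in the original source):
             (eq (not X) (const_int 5)) becomes (eq X (const_int -6)), and
             (lt (not X) 0) becomes (ge X 0), since NOT complements the
             sign bit.  */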
          break;

        case NEG:
          /* If testing for equality, we can take the NEG of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* The remaining cases only apply to comparisons with zero.  */
          if (const_op != 0)
            break;

          /* When X is ABS or is known positive,
             (neg X) is < 0 if and only if X != 0.  */

          if (sign_bit_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ABS
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
                      && (nonzero_bits (XEXP (op0, 0), mode)
                          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
            {
              op0 = XEXP (op0, 0);
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If we have NEG of something whose two high-order bits are the
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
          if (num_sign_bit_copies (op0, mode) >= 2)
            {
              op0 = XEXP (op0, 0);
              code = swap_condition (code);
              continue;
            }
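          /* Illustrative example (not in the original source): when the two
             high-order bits of (neg A) agree, (lt (neg A) 0) is rewritten
             as (gt A 0) by swapping the condition.  */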
          break;

        case ROTATE:
          /* If we are testing equality and our count is a constant, we
             can perform the inverse operation on our RHS.  */
          if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
              && (tem = simplify_binary_operation (ROTATERT, mode,
                                                   op1, XEXP (op0, 1))) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
             a particular bit.  Convert it to an AND of a constant of that
             bit.  This will be converted into a ZERO_EXTRACT.  */
          if (const_op == 0 && sign_bit_comparison_p
              && CONST_INT_P (XEXP (op0, 1))
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }
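          /* Illustrative example (not in the original source): in SImode,
             (lt (rotate X (const_int 1)) 0) tests bit 30 of X, so it is
             rewritten as (ne (and X (const_int 0x40000000)) 0).  */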

          /* Fall through.  */

        case ABS:
          /* ABS is ignorable inside an equality comparison with zero.  */
          if (const_op == 0 && equality_comparison_p)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SIGN_EXTEND:
          /* Can simplify (compare (zero/sign_extend FOO) CONST) to
             (compare FOO CONST) if CONST fits in FOO's mode and we
             are either testing inequality or have an unsigned
             comparison with ZERO_EXTEND or a signed comparison with
             SIGN_EXTEND.  But don't do it if we don't have a compare
             insn of the given mode, since we'd have to revert it
             later on, and then we wouldn't know whether to sign- or
             zero-extend.  */
          mode = GET_MODE (XEXP (op0, 0));
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
              && ! unsigned_comparison_p
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < (((unsigned HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (mode) - 1))))
              && have_insn_for (COMPARE, mode))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
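          /* Illustrative example (not in the original source): on a target
             with a QImode compare insn, (lt (sign_extend:SI X) (const_int
             100)) can be done as (lt X (const_int 100)) in QImode, since
             100 fits below the QImode sign bit.  */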
          break;

        case SUBREG:
          /* Check for the case where we are comparing A - C1 with C2, that is

               (subreg:MODE (plus (A) (-C1))) op (C2)

             with C1 a constant, and try to lift the SUBREG, i.e. to do the
             comparison in the wider mode.  One of the following two conditions
             must be true in order for this to be valid:

               1. The mode extension results in the same bit pattern being added
                  on both sides and the comparison is equality or unsigned.  As
                  C2 has been truncated to fit in MODE, the pattern can only be
                  all 0s or all 1s.

               2. The mode extension results in the sign bit being copied on
                  each side.

             The difficulty here is that we have predicates for A but not for
             (A - C1) so we need to check that C1 is within proper bounds so
             as to perturb A as little as possible.  */

          if (mode_width <= HOST_BITS_PER_WIDE_INT
              && subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
              && GET_CODE (SUBREG_REG (op0)) == PLUS
              && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
            {
              enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
              rtx a = XEXP (SUBREG_REG (op0), 0);
              HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));

              if ((c1 > 0
                   && (unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
                   && (equality_comparison_p || unsigned_comparison_p)
                   /* (A - C1) zero-extends if it is positive and sign-extends
                      if it is negative, C2 both zero- and sign-extends.  */
                   && ((0 == (nonzero_bits (a, inner_mode)
                              & ~GET_MODE_MASK (mode))
                        && const_op >= 0)
                       /* (A - C1) sign-extends if it is positive and 1-extends
                          if it is negative, C2 both sign- and 1-extends.  */
                       || (num_sign_bit_copies (a, inner_mode)
                           > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
                                             - mode_width)
                           && const_op < 0)))
                  || ((unsigned HOST_WIDE_INT) c1
                       < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
                      /* (A - C1) always sign-extends, like C2.  */
                      && num_sign_bit_copies (a, inner_mode)
                         > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
                                           - (mode_width - 1))))
                {
                  op0 = SUBREG_REG (op0);
                  continue;
                }
            }
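          /* Illustrative sketch (not in the original source): for
             (subreg:QI (plus:SI A (const_int -1)) 0) compared unsigned
             with C2, lifting the SUBREG is safe when A's upper SImode bits
             are known zero and C2 >= 0, because both sides then extend
             with the same (zero) bit pattern.  */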

          /* If the inner mode is narrower and we are extracting the low part,
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
          if (subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
            /* Fall through */ ;
          else
            break;

          /* ... fall through ...  */

        case ZERO_EXTEND:
          mode = GET_MODE (XEXP (op0, 0));
          if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
              && (unsigned_comparison_p || equality_comparison_p)
              && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
              && have_insn_for (COMPARE, mode))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case PLUS:
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        op1, XEXP (op0, 1))))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
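          /* Illustrative example (not in the original source):
             (eq (plus X (const_int 3)) (const_int 7)) becomes
             (eq X (const_int 4)); wrap-around makes this valid only for
             EQ/NE, not for ordered comparisons.  */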

          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
            {
              op0 = XEXP (XEXP (op0, 0), 0);
              code = (code == LT ? EQ : NE);
              continue;
            }
          break;

        case MINUS:
          /* We used to optimize signed comparisons against zero, but that
             was incorrect.  Unsigned comparisons against zero (GTU, LEU)
             arrive here as equality comparisons, or (GEU, LTU) are
             optimized away.  No need to special-case them.  */

          /* (eq (minus A B) C) -> (eq A (plus B C)) or
             (eq B (minus A C)), whichever simplifies.  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        XEXP (op0, 0), op1)))
            {
              op0 = XEXP (op0, 1);
              op1 = tem;
              continue;
            }

          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
             of bits in X minus 1, is one iff X > 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
                 == mode_width - 1
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? LE : GT);
              continue;
            }
          break;

        case XOR:
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
             if C is zero or B is a constant.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (XOR, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
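          /* Illustrative example (not in the original source):
             (eq (xor X (const_int 5)) (const_int 9)) becomes
             (eq X (const_int 12)), since XOR is its own inverse.  */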
          break;

        case EQ:  case NE:
        case UNEQ:  case LTGT:
        case LT:  case LTU:  case UNLT:  case LE:  case LEU:  case UNLE:
        case GT:  case GTU:  case UNGT:  case GE:  case GEU:  case UNGE:
        case UNORDERED: case ORDERED:
          /* We can't do anything if OP0 is a condition code value, rather
             than an actual data value.  */
          if (const_op != 0
              || CC0_P (XEXP (op0, 0))
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
            break;

          /* Get the two operands being compared.  */
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
          else
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

          /* Check for the cases where we simply want the result of the
             earlier test or the opposite of that result.  */
          if (code == NE || code == EQ
              || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
                  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                  && (STORE_FLAG_VALUE
                      & (((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
                  && (code == LT || code == GE)))
            {
              enum rtx_code new_code;
              if (code == LT || code == NE)
                new_code = GET_CODE (op0);
              else
                new_code = reversed_comparison_code (op0, NULL);

              if (new_code != UNKNOWN)
                {
                  code = new_code;
                  op0 = tem;
                  op1 = tem1;
                  continue;
                }
            }
          break;

        case IOR:
          /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
             iff X <= 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? GT : LE);
              continue;
            }
          break;

        case AND:
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
             will be converted to a ZERO_EXTRACT later.  */
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
            {
              op0 = simplify_and_const_int
                (NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
                                                   XEXP (op0, 1),
                                                   XEXP (XEXP (op0, 0), 1)),
                 (HOST_WIDE_INT) 1);
              continue;
            }

          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
             zero and X is a comparison and C1 and C2 describe only bits set
             in STORE_FLAG_VALUE, we can compare with X.  */
          if (const_op == 0 && equality_comparison_p
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && CONST_INT_P (XEXP (op0, 1))
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
            {
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
              if ((~STORE_FLAG_VALUE & mask) == 0
                  && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
                          && COMPARISON_P (tem))))
                {
                  op0 = XEXP (XEXP (op0, 0), 0);
                  continue;
                }
            }

          /* If we are doing an equality comparison of an AND of a bit equal
             to the sign bit, replace this with a LT or GE comparison of
             the underlying value.  */
          if (equality_comparison_p
              && const_op == 0
              && CONST_INT_P (XEXP (op0, 1))
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                  == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              op0 = XEXP (op0, 0);
              code = (code == EQ ? GE : LT);
              continue;
            }

          /* If this AND operation is really a ZERO_EXTEND from a narrower
             mode, the constant fits within that mode, and this is either an
             equality or unsigned comparison, try to do this comparison in
             the narrower mode.

             Note that in:

             (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
             -> (ne:DI (reg:SI 4) (const_int 0))

             unless TRULY_NOOP_TRUNCATION allows it or the register is
             known to hold a value of the required mode, the
             transformation is invalid.  */
          if ((equality_comparison_p || unsigned_comparison_p)
              && CONST_INT_P (XEXP (op0, 1))
              && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
                                   & GET_MODE_MASK (mode))
                                  + 1)) >= 0
              && const_op >> i == 0
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
              && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
                                         GET_MODE_BITSIZE (GET_MODE (op0)))
                  || (REG_P (XEXP (op0, 0))
                      && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
            {
              op0 = gen_lowpart (tmode, XEXP (op0, 0));
              continue;
            }
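          /* Illustrative example (not in the original source): when the
             truncation is a no-op, (ne (and:SI X (const_int 0xff))
             (const_int 7)) can be done as a QImode comparison of the low
             part of X against 7.  */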

          /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
             fits in both M1 and M2 and the SUBREG is either paradoxical
             or represents the low part, permute the SUBREG and the AND
             and try again.  */
          if (GET_CODE (XEXP (op0, 0)) == SUBREG)
            {
              unsigned HOST_WIDE_INT c1;
              tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
              /* Require an integral mode, to avoid creating something like
                 (AND:SF ...).  */
              if (SCALAR_INT_MODE_P (tmode)
                  /* It is unsafe to commute the AND into the SUBREG if the
                     SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
                     not defined.  As originally written the upper bits
                     have a defined value due to the AND operation.
                     However, if we commute the AND inside the SUBREG then
                     they no longer have defined values and the meaning of
                     the code has been changed.  */
                  && (0
#ifdef WORD_REGISTER_OPERATIONS
                      || (mode_width > GET_MODE_BITSIZE (tmode)
                          && mode_width <= BITS_PER_WORD)
#endif
                      || (mode_width <= GET_MODE_BITSIZE (tmode)
                          && subreg_lowpart_p (XEXP (op0, 0))))
                  && CONST_INT_P (XEXP (op0, 1))
                  && mode_width <= HOST_BITS_PER_WIDE_INT
                  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
                  && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
                  && (c1 & ~GET_MODE_MASK (tmode)) == 0
                  && c1 != mask
                  && c1 != GET_MODE_MASK (tmode))
                {
                  op0 = simplify_gen_binary (AND, tmode,
                                             SUBREG_REG (XEXP (op0, 0)),
                                             gen_int_mode (c1, tmode));
                  op0 = gen_lowpart (mode, op0);
                  continue;
                }
            }

          /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0).  */
          if (const_op == 0 && equality_comparison_p
              && XEXP (op0, 1) == const1_rtx
              && GET_CODE (XEXP (op0, 0)) == NOT)
            {
              op0 = simplify_and_const_int
                (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
              code = (code == NE ? EQ : NE);
              continue;
            }

          /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
             (eq (and (lshiftrt X) 1) 0).
             Also handle the case where (not X) is expressed using xor.  */
          if (const_op == 0 && equality_comparison_p
              && XEXP (op0, 1) == const1_rtx
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
            {
              rtx shift_op = XEXP (XEXP (op0, 0), 0);
              rtx shift_count = XEXP (XEXP (op0, 0), 1);

              if (GET_CODE (shift_op) == NOT
                  || (GET_CODE (shift_op) == XOR
                      && CONST_INT_P (XEXP (shift_op, 1))
                      && CONST_INT_P (shift_count)
                      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
                      && (INTVAL (XEXP (shift_op, 1))
                          == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
                {
                  op0 = simplify_and_const_int
                    (NULL_RTX, mode,
                     gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
                     (HOST_WIDE_INT) 1);
                  code = (code == NE ? EQ : NE);
                  continue;
                }
            }
          break;

        case ASHIFT:
          /* If we have (compare (ashift FOO N) (const_int C)) and
             the high order N bits of FOO (N+1 if an inequality comparison)
             are known to be zero, we can do this by comparing FOO with C
             shifted right N bits so long as the low-order N bits of C are
             zero.  */
          if (CONST_INT_P (XEXP (op0, 1))
              && INTVAL (XEXP (op0, 1)) >= 0
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
                  < HOST_BITS_PER_WIDE_INT)
              && ((const_op
                   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & ~(mask >> (INTVAL (XEXP (op0, 1))
                               + ! equality_comparison_p))) == 0)
            {
              /* We must perform a logical shift, not an arithmetic one,
                 as we want the top N bits of C to be zero.  */
              unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);

              temp >>= INTVAL (XEXP (op0, 1));
              op1 = gen_int_mode (temp, mode);
              op0 = XEXP (op0, 0);
              continue;
            }
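          /* Illustrative example (not in the original source): if the two
             high-order bits of X are known zero, (eq (ashift:SI X
             (const_int 2)) (const_int 20)) becomes (eq X (const_int 5)),
             because the two low-order bits of 20 are zero.  */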

          /* If we are doing a sign bit comparison, it means we are testing
             a particular bit.  Convert it to the appropriate AND.  */
          if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If this is an equality comparison with zero and we are shifting
             the low bit to the sign bit, we can convert this to an AND of the
             low-order bit.  */
          if (const_op == 0 && equality_comparison_p
              && CONST_INT_P (XEXP (op0, 1))
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
                 == mode_width - 1)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            (HOST_WIDE_INT) 1);
              continue;
            }
          break;

        case ASHIFTRT:
          /* If this is an equality comparison with zero, we can do this
             as a logical shift, which might be much simpler.  */
          if (equality_comparison_p && const_op == 0
              && CONST_INT_P (XEXP (op0, 1)))
            {
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
                                          XEXP (op0, 0),
                                          INTVAL (XEXP (op0, 1)));
              continue;
            }

          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
             do the comparison in a narrower mode.  */
          if (! unsigned_comparison_p
              && CONST_INT_P (XEXP (op0, 1))
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
                                         MODE_INT, 1)) != BLKmode
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
                  <= GET_MODE_MASK (tmode)))
            {
              op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
              continue;
            }

          /* Likewise if OP0 is a PLUS of a sign extension with a
             constant, which is usually represented with the PLUS
             between the shifts.  */
          if (! unsigned_comparison_p
              && CONST_INT_P (XEXP (op0, 1))
              && GET_CODE (XEXP (op0, 0)) == PLUS
              && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
              && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
                                         MODE_INT, 1)) != BLKmode
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_MODE_MASK (tmode) >> 1) + 1)
                  <= GET_MODE_MASK (tmode)))
            {
              rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
              rtx add_const = XEXP (XEXP (op0, 0), 1);
              rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
                                                   add_const, XEXP (op0, 1));

              op0 = simplify_gen_binary (PLUS, tmode,
                                         gen_lowpart (tmode, inner),
                                         new_const);
              continue;
            }

          /* ... fall through ...  */
        case LSHIFTRT:
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
             the low order N bits of FOO are known to be zero, we can do this
             by comparing FOO with C shifted left N bits so long as no
             overflow occurs.  */
          if (CONST_INT_P (XEXP (op0, 1))
              && INTVAL (XEXP (op0, 1)) >= 0
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
              && (((unsigned HOST_WIDE_INT) const_op
                   + (GET_CODE (op0) != LSHIFTRT
                      ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
                         + 1)
                      : 0))
                  <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
            {
              /* If the shift was logical, then we must make the condition
                 unsigned.  */
              if (GET_CODE (op0) == LSHIFTRT)
                code = unsigned_condition (code);

              const_op <<= INTVAL (XEXP (op0, 1));
              op1 = GEN_INT (const_op);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are using this shift to extract just the sign bit, we
             can replace this with an LT or GE comparison.  */
          if (const_op == 0
              && (equality_comparison_p || sign_bit_comparison_p)
              && CONST_INT_P (XEXP (op0, 1))
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
                 == mode_width - 1)
            {
              op0 = XEXP (op0, 0);
              code = (code == NE || code == GT ? LT : GE);
              continue;
            }
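          /* Illustrative example (not in the original source): in SImode,
             (eq (lshiftrt X (const_int 31)) (const_int 0)) extracts just
             the sign bit, so it is replaced with (ge X (const_int 0)).  */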
          break;

        default:
          break;
        }

      break;
    }

  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that is not doing anything or is
     paradoxical.  The latter transformation must only be performed when
     it is known that the "extra" bits will be the same in op0 and op1 or
     that they don't matter.  There are three cases to consider:

     1. SUBREG_REG (op0) is a register.  In this case the bits are don't
     care bits and we can assume they have any convenient value.  So
     making the transformation is safe.

     2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
     In this case the upper bits of op0 are undefined.  We should not make
     the simplification in that case as we do not know the contents of
     those bits.

     3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
     UNKNOWN.  In that case we know those bits are zeros or ones.  We must
     also be sure that they are the same as the upper bits of op1.

     We can never remove a SUBREG for a non-equality comparison because
     the sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);

  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
      && (code == NE || code == EQ))
    {
      if (GET_MODE_SIZE (GET_MODE (op0))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
        {
          /* For paradoxical subregs, allow case 1 as above.  Case 3 isn't
             implemented.  */
          if (REG_P (SUBREG_REG (op0)))
            {
              op0 = SUBREG_REG (op0);
              op1 = gen_lowpart (GET_MODE (op0), op1);
            }
        }
      else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
                <= HOST_BITS_PER_WIDE_INT)
               && (nonzero_bits (SUBREG_REG (op0),
                                 GET_MODE (SUBREG_REG (op0)))
                   & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
        {
          tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);

          if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
               & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
            op0 = SUBREG_REG (op0), op1 = tem;
        }
    }

  /* We now do the opposite procedure: Some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && ! have_insn_for (COMPARE, mode))
    for (tmode = GET_MODE_WIDER_MODE (mode);
         (tmode != VOIDmode
          && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
         tmode = GET_MODE_WIDER_MODE (tmode))
      if (have_insn_for (COMPARE, tmode))
        {
          int zero_extended;

          /* If this is a test for negative, we can make an explicit
             test of the sign bit.  Test this first so we can use
             a paradoxical subreg to extend OP0.  */

          if (op1 == const0_rtx && (code == LT || code == GE)
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_gen_binary (AND, tmode,
                                         gen_lowpart (tmode, op0),
                                         GEN_INT ((HOST_WIDE_INT) 1
                                                  << (GET_MODE_BITSIZE (mode)
                                                      - 1)));
              code = (code == LT) ? NE : EQ;
              break;
            }

          /* If the only nonzero bits in OP0 and OP1 are those in the
             narrower mode and this is an equality or unsigned comparison,
             we can use the wider mode.  Similarly for sign-extended
             values, in which case it is true for all comparisons.  */
          zero_extended = ((code == EQ || code == NE
                            || code == GEU || code == GTU
                            || code == LEU || code == LTU)
                           && (nonzero_bits (op0, tmode)
                               & ~GET_MODE_MASK (mode)) == 0
                           && ((CONST_INT_P (op1)
                                || (nonzero_bits (op1, tmode)
                                    & ~GET_MODE_MASK (mode)) == 0)));
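          /* Illustrative example (not in the original source): a QImode
             equality whose operands have no bits set above bit 7 can be
             zero-extended and carried out in SImode; the extension
             preserves the result of every equality or unsigned test.  */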

          if (zero_extended
              || ((num_sign_bit_copies (op0, tmode)
                   > (unsigned int) (GET_MODE_BITSIZE (tmode)
                                     - GET_MODE_BITSIZE (mode)))
                  && (num_sign_bit_copies (op1, tmode)
                      > (unsigned int) (GET_MODE_BITSIZE (tmode)
                                        - GET_MODE_BITSIZE (mode)))))
            {
              /* If OP0 is an AND and we don't have an AND in MODE either,
                 make a new AND in the proper mode.  */
              if (GET_CODE (op0) == AND
                  && !have_insn_for (AND, mode))
                op0 = simplify_gen_binary (AND, tmode,
                                           gen_lowpart (tmode,
                                                        XEXP (op0, 0)),
                                           gen_lowpart (tmode,
                                                        XEXP (op0, 1)));
              else
                {
                  if (zero_extended)
                    {
                      op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
                      op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
                    }
                  else
                    {
                      op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
                      op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
                    }
                  break;
                }
            }
        }

#ifdef CANONICALIZE_COMPARISON
  /* If this machine only supports a subset of valid comparisons, see if we
     can convert an unsupported one into a supported one.  */
  CANONICALIZE_COMPARISON (code, op0, op1);
#endif

  *pop0 = op0;
  *pop1 = op1;

  return code;
}

/* Utility function for record_value_for_reg.  Count number of
   rtxs in X.  */
static int
count_rtxs (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j, ret = 1;

  if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
      || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (x0 == x1)
        return 1 + 2 * count_rtxs (x0);

      if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
           || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return 2 + 2 * count_rtxs (x0)
               + count_rtxs (x0 == XEXP (x1, 0)
                             ? XEXP (x1, 1) : XEXP (x1, 0));

      if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
           || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return 2 + 2 * count_rtxs (x1)
               + count_rtxs (x1 == XEXP (x0, 0)
                             ? XEXP (x0, 1) : XEXP (x0, 0));
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      ret += count_rtxs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        ret += count_rtxs (XVECEXP (x, i, j));

  return ret;
}
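/* Illustrative note (not in the original source): count_rtxs counts each
   rtx node once, so (plus (reg) (const_int 1)) yields 3; the special
   cases above charge a shared operand twice, once for each of its
   uses.  */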

/* Utility function for the following routine.  Called when X is part of a
   value being stored into last_set_value.  Sets last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */

static void
update_table_tick (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int r;

      for (r = regno; r < endregno; r++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
          rsp->last_set_table_tick = label_tick;
        }

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        /* Check for identical subexpressions.  If x contains
           identical subexpressions we only have to traverse one of
           them.  */
        if (i == 0 && ARITHMETIC_P (x))
          {
            /* Note that at this point x1 has already been
               processed.  */
            rtx x0 = XEXP (x, 0);
            rtx x1 = XEXP (x, 1);

            /* If x0 and x1 are identical then there is no need to
               process x0.  */
            if (x0 == x1)
              break;

            /* If x0 is identical to a subexpression of x1 then while
               processing x1, x0 has already been processed.  Thus we
               are done with x.  */
            if (ARITHMETIC_P (x1)
                && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
              break;

            /* If x1 is identical to a subexpression of x0 then we
               still have to process the rest of x0.  */
            if (ARITHMETIC_P (x0)
                && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
              {
                update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
                break;
              }
          }

        update_table_tick (XEXP (x, i));
      }
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        update_table_tick (XVECEXP (x, i, j));
}
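/* Illustrative example (not in the original source): for
   (plus (reg 5) (reg 5)) both operands are the same rtx, so the loop
   above ticks register 5 while walking operand 1 and then skips
   operand 0 entirely.  */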

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_stat[].last_set; this is
   only permitted with VALUE also zero and is used to invalidate the
   register.  */

static void
record_value_for_reg (rtx reg, rtx insn, rtx value)
{
  unsigned int regno = REGNO (reg);
  unsigned int endregno = END_REGNO (reg);
  unsigned int i;
  reg_stat_type *rsp;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
         our insn.  */
      subst_low_luid = DF_INSN_LUID (insn);
      tem = get_last_value (reg);

      /* If TEM is simply a binary operation with two CLOBBERs as operands,
         it isn't going to be useful and will take a lot of time to process,
         so just use the CLOBBER.  */

      if (tem)
        {
          if (ARITHMETIC_P (tem)
              && GET_CODE (XEXP (tem, 0)) == CLOBBER
              && GET_CODE (XEXP (tem, 1)) == CLOBBER)
            tem = XEXP (tem, 0);
          else if (count_occurrences (value, reg, 1) >= 2)
            {
              /* If there are two or more occurrences of REG in VALUE,
                 prevent the value from growing too much.  */
              if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
                tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
            }

          value = replace_rtx (copy_rtx (value), reg, tem);
        }
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      rsp = VEC_index (reg_stat_type, reg_stat, i);

      if (insn)
        rsp->last_set = insn;

      rsp->last_set_value = 0;
      rsp->last_set_mode = VOIDmode;
      rsp->last_set_nonzero_bits = 0;
      rsp->last_set_sign_bit_copies = 0;
      rsp->last_death = 0;
      rsp->truncated_to_mode = VOIDmode;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If this register is already used in this block, mark it invalid,
     since otherwise we would confuse its two lives within the basic
     block; every later use of it then sees an invalid value.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      rsp = VEC_index (reg_stat_type, reg_stat, i);
      rsp->last_set_label = label_tick;
      if (!insn
          || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
        rsp->last_set_invalid = 1;
      else
        rsp->last_set_invalid = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
  if (value && !get_last_value_validate (&value, insn, label_tick, 0))
    {
      value = copy_rtx (value);
      if (!get_last_value_validate (&value, insn, label_tick, 1))
        value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  rsp->last_set_value = value;

  if (value)
    {
      enum machine_mode mode = GET_MODE (reg);
      subst_low_luid = DF_INSN_LUID (insn);
      rsp->last_set_mode = mode;
      if (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        mode = nonzero_bits_mode;
      rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
      rsp->last_set_sign_bit_copies
        = num_sign_bit_copies (value, GET_MODE (reg));
    }
}
11846
 
11847
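/* An illustrative sketch of the self-reference handling above (the
   register number is invented for the example): for "x++" we may be
   asked to record
       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1))),
   where VALUE mentions REG itself.  The previous value of (reg:SI 100)
   is substituted into VALUE; if no usable previous value exists, the
   validation step near the end of the function replaces the remaining
   self-reference with (clobber (const_int 0)) instead of looping.  */
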
/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  DATA is the instruction in which the
   set is occurring.  */

static void
record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
{
  rtx record_dead_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (!record_dead_insn)
    {
      if (REG_P (dest))
        record_value_for_reg (dest, NULL_RTX, NULL_RTX);
      return;
    }

  if (REG_P (dest))
    {
      /* If we are setting the whole register, we know its value.  Otherwise
         show that we don't know the value.  We can handle SUBREG in
         some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
               && GET_CODE (SET_DEST (setter)) == SUBREG
               && SUBREG_REG (SET_DEST (setter)) == dest
               && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
               && subreg_lowpart_p (SET_DEST (setter)))
        record_value_for_reg (dest, record_dead_insn,
                              gen_lowpart (GET_MODE (dest),
                                           SET_SRC (setter)));
      else
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (MEM_P (dest)
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = DF_INSN_LUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_stat[], in particular fields last_set, last_set_value,
   last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
   last_death, and also the similar information mem_last_set (which insn
   most recently modified memory) and last_call_luid (which insn was the
   most recent subroutine call).  */

static void
record_dead_and_set_regs (rtx insn)
{
  rtx link;
  unsigned int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
          && REG_P (XEXP (link, 0)))
        {
          unsigned int regno = REGNO (XEXP (link, 0));
          unsigned int endregno = END_REGNO (XEXP (link, 0));

          for (i = regno; i < endregno; i++)
            {
              reg_stat_type *rsp;

              rsp = VEC_index (reg_stat_type, reg_stat, i);
              rsp->last_death = insn;
            }
        }
      else if (REG_NOTE_KIND (link) == REG_INC)
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          {
            reg_stat_type *rsp;

            rsp = VEC_index (reg_stat_type, reg_stat, i);
            rsp->last_set_invalid = 1;
            rsp->last_set = insn;
            rsp->last_set_value = 0;
            rsp->last_set_mode = VOIDmode;
            rsp->last_set_nonzero_bits = 0;
            rsp->last_set_sign_bit_copies = 0;
            rsp->last_death = 0;
            rsp->truncated_to_mode = VOIDmode;
          }

      last_call_luid = mem_last_set = DF_INSN_LUID (insn);

      /* We can't combine into a call pattern.  Remember, though, that
         the return value register is set at this LUID.  We could
         still replace a register with the return value from the
         wrong subroutine call!  */
      note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
    }
  else
    note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
}

/* If a SUBREG has the promoted bit set, it is in fact a property of the
   register present in the SUBREG, so for each such SUBREG go back and
   adjust nonzero and sign bit information of the registers that are
   known to have some zero/sign bits set.

   This is needed because when combine blows the SUBREGs away, the
   information on zero/sign bits is lost and further combines can be
   missed because of that.  */

static void
record_promoted_value (rtx insn, rtx subreg)
{
  rtx links, set;
  unsigned int regno = REGNO (SUBREG_REG (subreg));
  enum machine_mode mode = GET_MODE (subreg);

  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
    return;

  for (links = LOG_LINKS (insn); links;)
    {
      reg_stat_type *rsp;

      insn = XEXP (links, 0);
      set = single_set (insn);

      if (! set || !REG_P (SET_DEST (set))
          || REGNO (SET_DEST (set)) != regno
          || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
        {
          links = XEXP (links, 1);
          continue;
        }

      rsp = VEC_index (reg_stat_type, reg_stat, regno);
      if (rsp->last_set == insn)
        {
          if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
            rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
        }

      if (REG_P (SET_SRC (set)))
        {
          regno = REGNO (SET_SRC (set));
          links = LOG_LINKS (insn);
        }
      else
        break;
    }
}

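/* A sketch of what record_promoted_value exploits (register numbers and
   modes are invented): on a target that promotes SImode variables to
   DImode registers, a use such as (subreg:SI (reg:DI 100) 0) with
   SUBREG_PROMOTED_VAR_P set and an unsigned promotion guarantees that
   the upper bits of (reg:DI 100) are zero, so its recorded
   last_set_nonzero_bits can safely be masked with GET_MODE_MASK (SImode).  */
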
/* Check if X, a register, is known to contain a value already
   truncated to MODE.  In this case we can use a subreg to refer to
   the truncated value even though in the generic case we would need
   an explicit truncation.  */

static bool
reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
{
  reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  enum machine_mode truncated = rsp->truncated_to_mode;

  if (truncated == 0
      || rsp->truncation_label < label_tick_ebb_start)
    return false;
  if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
    return true;
  if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                             GET_MODE_BITSIZE (truncated)))
    return true;
  return false;
}

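/* For illustration (target and register number are hypothetical): on a
   MIPS64-like machine where TRULY_NOOP_TRUNCATION (32, 64) is false,
   (truncate:SI (reg:DI 100)) normally needs a real instruction.  But if
   reg_truncated_to_mode (SImode, (reg:DI 100)) returns true, the
   register is already known to hold an SImode-truncated value, and a
   lowpart subreg can be used instead of the explicit truncation.  */
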
/* Callback for for_each_rtx.  If *P is a hard reg or a subreg record the mode
   that the register is accessed in.  For non-TRULY_NOOP_TRUNCATION targets we
   might be able to turn a truncate into a subreg using this information.
   Return -1 if traversing *P is complete or 0 otherwise.  */

static int
record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *p;
  enum machine_mode truncated_mode;
  reg_stat_type *rsp;

  if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
    {
      enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
      truncated_mode = GET_MODE (x);

      if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
        return -1;

      if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
                                 GET_MODE_BITSIZE (original_mode)))
        return -1;

      x = SUBREG_REG (x);
    }
  /* ??? For hard-regs we now record everything.  We might be able to
     optimize this using last_set_mode.  */
  else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    truncated_mode = GET_MODE (x);
  else
    return 0;

  rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
  if (rsp->truncated_to_mode == 0
      || rsp->truncation_label < label_tick_ebb_start
      || (GET_MODE_SIZE (truncated_mode)
          < GET_MODE_SIZE (rsp->truncated_to_mode)))
    {
      rsp->truncated_to_mode = truncated_mode;
      rsp->truncation_label = label_tick;
    }

  return -1;
}

/* Callback for note_uses.  Find hardregs and subregs of pseudos and
   the modes they are used in.  This can help turn TRUNCATEs into
   SUBREGs.  */

static void
record_truncated_values (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  for_each_rtx (x, record_truncated_value, NULL);
}

/* Scan X for promoted SUBREGs.  For each one found,
   note what it implies to the registers used in it.  */

static void
check_promoted_subreg (rtx insn, rtx x)
{
  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && REG_P (SUBREG_REG (x)))
    record_promoted_value (insn, x);
  else
    {
      const char *format = GET_RTX_FORMAT (GET_CODE (x));
      int i, j;

      for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
        switch (format[i])
          {
          case 'e':
            check_promoted_subreg (insn, XEXP (x, i));
            break;
          case 'V':
          case 'E':
            if (XVEC (x, i) != 0)
              for (j = 0; j < XVECLEN (x, i); j++)
                check_promoted_subreg (insn, XVECEXP (x, i, j));
            break;
          }
    }
}

/* Verify that all the registers and memory references mentioned in *LOC are
   still valid.  *LOC was part of a value set in INSN when label_tick was
   equal to TICK.  Return 0 if some are not.  If REPLACE is nonzero, replace
   the invalid references with (clobber (const_int 0)) and return 1.  This
   replacement is useful because we often can get useful information about
   the form of a value (e.g., if it was produced by a shift that always
   produces -1 or 0) even though we don't know exactly what registers it
   was produced from.  */

static int
get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i, j;

  if (REG_P (x))
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno = END_REGNO (x);
      unsigned int j;

      for (j = regno; j < endregno; j++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
          if (rsp->last_set_invalid
              /* If this is a pseudo-register that was only set once and not
                 live at the beginning of the function, it is always valid.  */
              || (! (regno >= FIRST_PSEUDO_REGISTER
                     && REG_N_SETS (regno) == 1
                     && (!REGNO_REG_SET_P
                         (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
                  && rsp->last_set_label > tick))
          {
            if (replace)
              *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
            return replace;
          }
        }

      return 1;
    }
  /* If this is a memory reference, make sure that there were no stores after
     it that might have clobbered the value.  We don't have alias info, so we
     assume any store invalidates it.  Moreover, we only have local UIDs, so
     we also assume that there were stores in the intervening basic blocks.  */
  else if (MEM_P (x) && !MEM_READONLY_P (x)
           && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
    {
      if (replace)
        *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          /* Check for identical subexpressions.  If x contains
             identical subexpressions we only have to traverse one of
             them.  */
          if (i == 1 && ARITHMETIC_P (x))
            {
              /* Note that at this point x0 has already been checked
                 and found valid.  */
              rtx x0 = XEXP (x, 0);
              rtx x1 = XEXP (x, 1);

              /* If x0 and x1 are identical then x is also valid.  */
              if (x0 == x1)
                return 1;

              /* If x1 is identical to a subexpression of x0 then
                 while checking x0, x1 has already been checked.  Thus
                 it is valid and so is x.  */
              if (ARITHMETIC_P (x0)
                  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
                return 1;

              /* If x0 is identical to a subexpression of x1 then x is
                 valid iff the rest of x1 is valid.  */
              if (ARITHMETIC_P (x1)
                  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
                return
                  get_last_value_validate (&XEXP (x1,
                                                  x0 == XEXP (x1, 0) ? 1 : 0),
                                           insn, tick, replace);
            }

          if (get_last_value_validate (&XEXP (x, i), insn, tick,
                                       replace) == 0)
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (get_last_value_validate (&XVECEXP (x, i, j),
                                       insn, tick, replace) == 0)
            return 0;
    }

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}

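/* A small worked example of the sharing shortcut above (the expression
   is invented): for x = (plus:SI (reg:SI 100) (reg:SI 100)), x0 == x1,
   so once operand 0 has been validated there is nothing left to check
   and the function returns 1 without traversing operand 1 again.  */
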
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (const_rtx x)
{
  unsigned int regno;
  rtx value;
  reg_stat_type *rsp;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart (GET_MODE (x), value);

  if (!REG_P (x))
    return 0;

  regno = REGNO (x);
  rsp = VEC_index (reg_stat_type, reg_stat, regno);
  value = rsp->last_set_value;

  /* If we don't have a value, or if it isn't for this basic block and
     it's either a hard register, set more than once, or it's live
     at the beginning of the function, return 0.

     Because if it's not live at the beginning of the function then the reg
     is always set before being used (is never used without being set).
     And, if it's set only once, and it's always set before use, then all
     uses must have the same last value, even if it's not from this basic
     block.  */

  if (value == 0
      || (rsp->last_set_label < label_tick_ebb_start
          && (regno < FIRST_PSEUDO_REGISTER
              || REG_N_SETS (regno) != 1
              || REGNO_REG_SET_P
                 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once.  */
  if (rsp->last_set_label == label_tick
      && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
    return 0;

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
    return value;

  return 0;
}

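/* Illustrative only (register number and constant are invented): on a
   little-endian target, if the last recorded value of (reg:SI 100) is
   (const_int 259), then get_last_value ((subreg:QI (reg:SI 100) 0))
   takes the non-paradoxical SUBREG path above and returns
   gen_lowpart (QImode, (const_int 259)), i.e. (const_int 3).  */
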
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_LUID.  */

static int
use_crosses_set_p (const_rtx x, int from_luid)
{
  const char *fmt;
  int i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned endreg = END_REGNO (x);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
         because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
        return 1;
#endif
      for (; regno < endreg; regno++)
        {
          reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
          if (rsp->last_set
              && rsp->last_set_label == label_tick
              && DF_INSN_LUID (rsp->last_set) > from_luid)
            return 1;
        }
      return 0;
    }

  if (code == MEM && mem_last_set > from_luid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_luid))
        return 1;
    }
  return 0;
}

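/* A concrete, made-up scenario this predicate guards against: insn A
   computes (plus (reg 100) (const_int 1)), a later insn B sets
   (reg 100) again, and we consider substituting A's PLUS into an insn
   after B.  use_crosses_set_p on the PLUS with A's LUID returns 1,
   because (reg 100) was set more recently than A, so the substitution
   would read the wrong value and is rejected.  */
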
/* Define three variables used for communication between the following
   routines.  */

static unsigned int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  unsigned int regno, endregno;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return nonzero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (rtx reg, rtx insn)
{
  basic_block block;
  unsigned int i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = END_REGNO (reg);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  For fixed registers
     we allow the machine description to decide whether use-and-clobber
     patterns are OK.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
        if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
          return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
     beginning of basic block.  */
  block = BLOCK_FOR_INSN (insn);
  for (;;)
    {
      if (INSN_P (insn))
        {
          note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
          if (reg_dead_flag)
            return reg_dead_flag == 1 ? 1 : 0;

          if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
            return 1;
        }

      if (insn == BB_HEAD (block))
        break;

      insn = PREV_INSN (insn);
    }

  /* Look at live-in sets for the basic block that we were in.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (df_get_live_in (block), i))
      return 0;

  return 1;
}

/* Note hard registers in X that are used.  */

static void
mark_used_regs_combine (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  unsigned int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (MEM_P (XEXP (x, 0)))
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (MEM_P (testreg))
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}

/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (unsigned int regno, rtx insn)
{
  rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    remove_note (insn, note);

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with luid between FROM_LUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
             rtx *pnotes)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
          && ! reg_referenced_p (x, maybe_kill_insn))
        return;

      if (where_dead
          && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
          && DF_INSN_LUID (where_dead) >= from_luid
          && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
        {
          rtx note = remove_death (regno, where_dead);

          /* It is possible for the call above to return 0.  This can occur
             when last_death points to I2 or I1 that we combined with.
             In that case make a new note.

             We must also check for the case where X is a hard register
             and NOTE is a death note for a range of hard registers
             including X.  In that case, we must put REG_DEAD notes for
             the remaining registers in place of NOTE.  */

          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                  > GET_MODE_SIZE (GET_MODE (x))))
            {
              unsigned int deadregno = REGNO (XEXP (note, 0));
              unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
              unsigned int ourend = END_HARD_REGNO (x);
              unsigned int i;

              for (i = deadregno; i < deadend; i++)
                if (i < regno || i >= ourend)
                  add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
            }

          /* If we didn't find any note, or if we found a REG_DEAD note that
             covers only part of the given reg, and we have a multi-reg hard
             register, then to be safe we must check for REG_DEAD notes
             for each register other than the first.  They could have
             their own REG_DEAD notes lying around.  */
          else if ((note == 0
                    || (note != 0
                        && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                            < GET_MODE_SIZE (GET_MODE (x)))))
                   && regno < FIRST_PSEUDO_REGISTER
                   && hard_regno_nregs[regno][GET_MODE (x)] > 1)
            {
              unsigned int ourend = END_HARD_REGNO (x);
              unsigned int i, offset;
              rtx oldnotes = 0;

              if (note)
                offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
              else
                offset = 1;

              for (i = regno + offset; i < ourend; i++)
                move_deaths (regno_reg_rtx[i],
                             maybe_kill_insn, from_luid, to_insn, &oldnotes);
            }

          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
            {
              XEXP (note, 1) = *pnotes;
              *pnotes = note;
            }
          else
            *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
        }

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
         that accesses one word of a multi-word item, some
         piece of every register in the expression is used by
         this insn, so remove any old death.  */
      /* ??? So why do we test for equality of the sizes?  */

      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (MEM_P (dest))
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
    }
}

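/* A sketch of the multi-word hard-register case above (register names
   are hypothetical): if X is the single-word hard reg r0 but NOTE is a
   REG_DEAD note for a two-word value occupying r0..r1, the note is
   wider than X; REG_DEAD notes for the uncovered piece r1 are put back
   on WHERE_DEAD, and a fresh note for X alone goes onto *PNOTES.  */
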
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (rtx x, rtx body)
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      unsigned int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (!REG_P (target))
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = end_hard_regno (GET_MODE (target), tregno);
      endregno = end_hard_regno (GET_MODE (x), regno);

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}

/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
                  rtx elim_i1)
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_BR_PROB:
        case REG_BR_PRED:
          /* Doesn't matter much where we put this, as long as it's somewhere.
             It is preferable to keep these notes on branches, which is most
             likely to be i3.  */
          place = i3;
          break;

        case REG_VALUE_PROFILE:
          /* Just get rid of this note, as it is unused later anyway.  */
          break;

        case REG_NON_LOCAL_GOTO:
          if (JUMP_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && JUMP_P (i2));
              place = i2;
            }
          break;

        case REG_EH_REGION:
          /* These notes must remain with the call or trapping instruction.  */
          if (CALL_P (i3))
            place = i3;
          else if (i2 && CALL_P (i2))
            place = i2;
          else
            {
              gcc_assert (flag_non_call_exceptions);
              if (may_trap_p (i3))
                place = i3;
              else if (i2 && may_trap_p (i2))
                place = i2;
              /* ??? Otherwise assume we've combined things such that we
                 can now prove that the instructions can't trap.  Drop the
                 note in this case.  */
            }
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          /* These notes must remain with the call.  It should not be
             possible for both I2 and I3 to be a call.  */
          if (CALL_P (i3))
            place = i3;
          else
            {
              gcc_assert (i2 && CALL_P (i2));
              place = i2;
            }
          break;

        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if it is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers which were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (REG_P (XEXP (note, 0))
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (REG_P (XEXP (note, 0))
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NOALIAS:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
          /* These notes say something about how a register is used.  They must
             be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_LABEL_TARGET:
        case REG_LABEL_OPERAND:
          /* This can show up in several ways -- either directly in the
             pattern, or hidden off in the constant pool with (or without?)
             a REG_EQUAL note.  */
          /* ??? Ignore the without-reg_equal-note problem for now.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
              || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
                  && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                  && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
            place = i3;

          if (i2
              && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
                  || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
                      && GET_CODE (XEXP (tem, 0)) == LABEL_REF
                      && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }

          /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
             as a JUMP_LABEL or decrement LABEL_NUSES if it's already
             there.  */
          if (place && JUMP_P (place)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place) == NULL
                  || JUMP_LABEL (place) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place);

              if (!label)
                JUMP_LABEL (place) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
            }

          if (place2 && JUMP_P (place2)
              && REG_NOTE_KIND (note) == REG_LABEL_TARGET
              && (JUMP_LABEL (place2) == NULL
                  || JUMP_LABEL (place2) == XEXP (note, 0)))
            {
              rtx label = JUMP_LABEL (place2);

              if (!label)
                JUMP_LABEL (place2) = XEXP (note, 0);
              else if (LABEL_P (label))
                LABEL_NUSES (label)--;
              place2 = 0;
            }
          break;

        case REG_NONNEG:
          /* This note says something about the value of a register prior
             to the execution of an insn.  It is too much trouble to see
             if the note is still correct in all situations.  It is better
             to simply delete it.  */
          break;

        case REG_DEAD:
          /* If we replaced the right hand side of FROM_INSN with a
             REG_EQUAL note, the original use of the dying register
             will not have been combined into I3 and I2.  In such cases,
             FROM_INSN is guaranteed to be the first of the combined
             instructions, so we simply need to search back before
             FROM_INSN for the previous use or set of this register,
             then alter the notes there appropriately.

             If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is nonzero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && from_insn == i2mod
              && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
            tem = from_insn;
          else
            {
              if (from_insn
                  && CALL_P (from_insn)
                  && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
                place = from_insn;
              else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
                place = i3;
              else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
                       && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                place = i2;
              else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
                        && !(i2mod
                             && reg_overlap_mentioned_p (XEXP (note, 0),
                                                         i2mod_old_rhs)))
                       || rtx_equal_p (XEXP (note, 0), elim_i1))
                break;
              tem = i3;
            }

          if (place == 0)
            {
              basic_block bb = this_basic_block;

              for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
                {
                  if (!NONDEBUG_INSN_P (tem))
                    {
                      if (tem == BB_HEAD (bb))
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  Don't delete sets to
                     global register vars.  */
                  if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
                       || !global_regs[REGNO (XEXP (note, 0))])
                      && reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             (GET_CODE (inner_dest) == STRICT_LOW_PART
                              || GET_CODE (inner_dest) == SUBREG
                              || GET_CODE (inner_dest) == ZERO_EXTRACT);
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */
                          rtx old_notes = REG_NOTES (tem);

                          PATTERN (tem) = pc_rtx;
                          REG_NOTES (tem) = NULL;

                          distribute_notes (old_notes, tem, tem, NULL_RTX,
                                            NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          SET_INSN_DELETED (tem);
                          if (tem == i2)
                            i2 = NULL_RTX;

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;
                              old_notes = REG_NOTES (cc0_setter);
                              REG_NOTES (cc0_setter) = NULL;

                              distribute_notes (old_notes, cc0_setter,
                                                cc0_setter, NULL_RTX,
                                                NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              SET_INSN_DELETED (cc0_setter);
                              if (cc0_setter == i2)
                                i2 = NULL_RTX;
                            }
#endif
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /*  If there isn't already a REG_UNUSED note, put one
                              here.  Do not place a REG_DEAD note, even if
                              the register is also used here; that would not
                              match the algorithm used in lifetime analysis
                              and can cause the consistency check in the
                              scheduler to fail.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (CALL_P (tem)
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
                          && from_insn
                          && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == BB_HEAD (bb))
                    break;
                }

            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.
             We check here if it is set at all, not if it is totally replaced,
             which is what `dead_or_set_p' checks, so also check for it being
             set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              unsigned int regno = REGNO (XEXP (note, 0));
              reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     last_death.  [I no longer understand why this is
                     being done.] */
                  if (rsp->last_death != place)
                    rsp->last_death = 0;
                  place = 0;
                }
              else
                rsp->last_death = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must arrange for an appropriate REG_DEAD
                 note to be added for it.  However, we can't just emit a USE
                 and tag the note to it, since the register might actually
                 be dead; so we recurse, and the recursive call then finds
                 the previous insn that used this register.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
                {
                  unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
                  int all_used = 1;
                  unsigned int i;

                  for (i = regno; i < endregno; i++)
                    if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                         && ! find_regno_fusage (place, USE, i))
                        || dead_or_set_regno_p (place, i))
                      all_used = 0;

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         not already dead or set.  */

                      for (i = regno; i < endregno;
                           i += hard_regno_nregs[i][reg_raw_mode[i]])
                        {
                          rtx piece = regno_reg_rtx[i];
                          basic_block bb = this_basic_block;

                          if (! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            {
                              rtx new_note = alloc_reg_note (REG_DEAD, piece,
                                                             NULL_RTX);

                              distribute_notes (new_note, place, place,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                            }
                          else if (! refers_to_regno_p (i, i + 1,
                                                        PATTERN (place), 0)
                                   && ! find_regno_fusage (place, USE, i))
                            for (tem = PREV_INSN (place); ;
                                 tem = PREV_INSN (tem))
                              {
                                if (!NONDEBUG_INSN_P (tem))
                                  {
                                    if (tem == BB_HEAD (bb))
                                      break;
                                    continue;
                                  }
                                if (dead_or_set_p (tem, piece)
                                    || reg_bitfield_target_p (piece,
                                                              PATTERN (tem)))
                                  {
                                    add_reg_note (tem, REG_UNUSED, piece);
                                    break;
                                  }
                              }

                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          gcc_unreachable ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }

      if (place2)
        add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0));
    }
}

/* Similarly to above, distribute the LOG_LINKS that used to be present on
13249
   I3, I2, and I1 to new locations.  This is also called to add a link
13250
   pointing at I3 when I3's destination is changed.  */
13251
 
13252
static void
13253
distribute_links (rtx links)
13254
{
13255
  rtx link, next_link;
13256
 
13257
  for (link = links; link; link = next_link)
13258
    {
13259
      rtx place = 0;
13260
      rtx insn;
13261
      rtx set, reg;
13262
 
13263
      next_link = XEXP (link, 1);
13264
 
13265
      /* If the insn that this link points to is a NOTE or isn't a single
13266
         set, ignore it.  In the latter case, it isn't clear what we
13267
         can do other than ignore the link, since we can't tell which
13268
         register it was for.  Such links wouldn't be used by combine
13269
         anyway.
13270
 
13271
         It is not possible for the destination of the target of the link to
13272
         have been changed by combine.  The only way that could happen is if we
         replace I3, I2, and I1 by I3 and I2.  But in that case the
         destination of I2 also remains unchanged.  */

      if (NOTE_P (XEXP (link, 0))
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
                     || BB_HEAD (this_basic_block->next_bb) != insn));
           insn = NEXT_INSN (insn))
        if (DEBUG_INSN_P (insn))
          continue;
        else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (CALL_P (insn)
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }
        else if (INSN_P (insn) && reg_set_p (reg, insn))
          break;

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
                added_links_insn = place;
            }
        }
    }
}

/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
   Check whether the expression pointed to by LOC is a register or
   memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
   Otherwise return zero.  */

static int
unmentioned_reg_p_1 (rtx *loc, void *expr)
{
  rtx x = *loc;

  if (x != NULL_RTX
      && (REG_P (x) || MEM_P (x))
      && ! reg_mentioned_p (x, (rtx) expr))
    return 1;
  return 0;
}

/* Check for any register or memory mentioned in EQUIV that is not
   mentioned in EXPR.  This is used to restrict EQUIV to "specializations"
   of EXPR where some registers may have been replaced by constants.  */

static bool
unmentioned_reg_p (rtx equiv, rtx expr)
{
  return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
}
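
/* Illustrative example (added annotation, not in the original source):
   with EXPR = (plus:SI (reg:SI 60) (reg:SI 61)) and
   EQUIV = (plus:SI (reg:SI 60) (const_int 4)), unmentioned_reg_p
   returns false, since the only register in EQUIV also appears in
   EXPR, so EQUIV is an acceptable specialization.  With the arguments
   swapped it returns true, because (reg:SI 61) does not appear in the
   constant-substituted form.  */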
 
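/* Print the combiner statistics gathered for the current function
   to FILE.  */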
void
dump_combine_stats (FILE *file)
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}
 
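/* Print the combiner statistics accumulated over the entire
   compilation to FILE.  */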
void
dump_combine_total_stats (FILE *file)
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
 
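/* Gate for the combine pass: instruction combination is only
   worthwhile when optimizing.  */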
static bool
gate_handle_combine (void)
{
  return (optimize > 0);
}
 
/* Try combining insns through substitution.  */
static unsigned int
rest_of_handle_combine (void)
{
  int rebuild_jump_labels_after_combine;

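  /* Set up the dataflow framework: run dead code elimination as part
     of the live-registers problem, defer insn rescanning, and request
     the problem that computes the REG_DEAD/REG_UNUSED notes on which
     the combiner relies.  */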
  df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
  df_note_add_problem ();
  df_analyze ();

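  /* Compute the per-register set and reference counts that the
     combiner consults.  */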
  regstat_init_n_sets_and_refs ();

  rebuild_jump_labels_after_combine
    = combine_instructions (get_insns (), max_reg_num ());

  /* Combining insns may have turned an indirect jump into a
     direct jump.  Rebuild the JUMP_LABEL fields of jumping
     instructions.  */
  if (rebuild_jump_labels_after_combine)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);
    }

  regstat_free_n_sets_and_refs ();
  return 0;
}
 
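/* Pass descriptor for the instruction combiner.  */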
struct rtl_opt_pass pass_combine =
{
 {
  RTL_PASS,
  "combine",                            /* name */
  gate_handle_combine,                  /* gate */
  rest_of_handle_combine,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE,                           /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};
