/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw (rtx);
static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
          && p->size == q->size && p->align == q->align
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}
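
/* Note: mem_attrs structures are hash-consed here, so two MEMs with
   identical attributes share one heap object and their MEM_ATTRS
   pointers can be compared directly for equality.  */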

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  reg_attrs *p = (reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  reg_attrs *p = (reg_attrs *) x;
  reg_attrs *q = (reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
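
/* Because of the sharing above, CONST_INTs with equal values are the
   same object; e.g. GEN_INT (0) always yields the one const0_rtx, so
   pointer comparison is enough to test CONST_INT equality.  */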

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
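
/* For example, gen_int_mode (0xff, QImode) yields GEN_INT (-1), since
   trunc_int_for_mode sign-extends from the 8-bit width of QImode; this
   keeps CONST_INTs in the canonical form the rest of GCC expects.  */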

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;

      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      /* We clear out all bits that don't belong in MODE, unless they and
         our sign bit are all one.  So we get either a reasonable negative
         value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
          && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
              != ((HOST_WIDE_INT) (-1) << (width - 1))))
        i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
               && ! (i1 == ~0 && i0 < 0))
        i1 = 0;
      else
        /* We should be able to represent this value as a constant.  */
        gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);

      /* If this would be an entire word for the target, but is not for
         the host, then sign-extend on the host so that the number will
         look the same way on the host that it would on the target.

         For example, when building a 64 bit alpha hosted 32 bit sparc
         targeted compiler, then we want the 32 bit unsigned value -1 to be
         represented as a 64 bit value -1, and not as 0x00000000ffffffff.
         The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
          && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
        i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
         CONST_INT.

         ??? Strictly speaking, this is wrong if we create a CONST_INT for
         a large unsigned constant with the size of MODE being
         HOST_BITS_PER_WIDE_INT and later try to interpret that constant
         in a wider mode.  In that case we will mis-interpret it as a
         negative number.

         Unfortunately, the only alternative is to make a CONST_DOUBLE for
         any constant in any mode if it is an unsigned constant larger
         than the maximum signed integer in an int on the host.  However,
         doing this will break everyone that always expects to see a
         CONST_INT for SImode and smaller.

         We have always been making CONST_INTs in this case, so nothing
         new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
        i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
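
/* For instance, with a 64-bit HOST_WIDE_INT, immed_double_const (-1, -1,
   DImode) folds to the CONST_INT constm1_rtx, while a TImode constant
   whose high word is not just the sign-extension of the low word comes
   back as a VOIDmode CONST_DOUBLE holding both words.  */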

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
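
/* So, for example, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) outside of
   reload always returns the shared stack_pointer_rtx object, which lets
   explicit stack pointer references be found by pointer comparison.  */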

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
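
/* gen_const_mem is typically used for locations such as constant pool
   references; MEM_READONLY_P tells the optimizers the contents never
   change, so such loads can be CSEd and hoisted freely.  */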

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
    of the fixed stack frame.  For example, something which is pushed
    by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!current_function_calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
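
/* For example, on a little-endian target (subreg:SI (reg:DI) 0) is
   accepted as a lowpart, while (subreg:SI (reg:DF) 0) is rejected
   whenever SImode is not word_mode, because float-mode subregs may not
   change size.  */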

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...     */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...     */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...                 */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
                          old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
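
/* For instance, gen_reg_rtx (DCmode) does not make one wide pseudo:
   with generating_concat_p set it returns (concat:DC (reg:DF) (reg:DF)),
   letting the real and imaginary parts be allocated independently.  */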

/* Generate a register with the same attributes as REG, but offset by
   OFFSET.  Do the big endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we would not do the big endian correction the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
          && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
        offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
        offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
        offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                      % UNITS_PER_WORD);
      else
        offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
        {
          /* MODE is wider than the variable so the new reg will cover
             the whole variable so the resulting OFFSET should be 0.  */
          offset = 0;
        }
      else
        {
          /* Convert little endian to machine endian WRT size of variable.  */
          if (WORDS_BIG_ENDIAN)
            offset = ((var_size - 1 - offset_le)
                      / UNITS_PER_WORD) * UNITS_PER_WORD;
          else
            offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

          if (BYTES_BIG_ENDIAN)
            offset += ((var_size - 1 - offset_le)
                       % UNITS_PER_WORD);
          else
            offset += offset_le % UNITS_PER_WORD;
        }
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
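
/* For example, gen_lowpart_common (QImode, GEN_INT (0x1234)) returns
   GEN_INT (0x34), and asking for the SImode lowpart of (zero_extend:DI
   (reg:SI)) simply returns the inner SImode register.  */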

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the offset in bytes to get the OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
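
/* For the SImode lowpart of a DImode value this returns 0 on a
   little-endian target and 4 on a big-endian one, matching where the
   least significant word sits in the in-memory representation.  */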

/* Return the offset in bytes to get the OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR)
        inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
                   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                        TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
                           TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                             TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
        if (integer_zerop (TREE_OPERAND (t, 1)))
          /* We don't know anything about the alignment.  */
          align = BITS_PER_UNIT;
        else
          align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
             || TREE_CODE (t) == NON_LVALUE_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
         examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR
             || TREE_CODE (base) == BIT_FIELD_REF)
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        {
          if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
            MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
          else
            MEM_NOTRAP_P (ref) = 1;
        }
      else
        MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        {
          tree base_type = TREE_TYPE (base);
          gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
                      || DECL_ARTIFICIAL (base));
          MEM_READONLY_P (ref) = 1;
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = component_ref_for_mem_expr (t);
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
1628
                 First subtract the lower bound, if any, in the type of the
1629
                 index, then convert to sizetype and multiply by the size of
1630
                 the array element.  */
1631
              if (! integer_zerop (low_bound))
1632
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1633
                                     index, low_bound);
1634
 
1635
              off_tree = size_binop (PLUS_EXPR,
1636
                                     size_binop (MULT_EXPR, convert (sizetype,
1637
                                                                     index),
1638
                                                 unit_size),
1639
                                     off_tree);
1640
              t2 = TREE_OPERAND (t2, 0);
1641
            }
1642
          while (TREE_CODE (t2) == ARRAY_REF);
1643
 
1644
          if (DECL_P (t2))
1645
            {
1646
              expr = t2;
1647
              offset = NULL;
1648
              if (host_integerp (off_tree, 1))
1649
                {
1650
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1651
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1652
                  align = DECL_ALIGN (t2);
1653
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1654
                    align = aoff;
1655
                  offset = GEN_INT (ioff);
1656
                  apply_bitpos = bitpos;
1657
                }
1658
            }
1659
          else if (TREE_CODE (t2) == COMPONENT_REF)
1660
            {
1661
              expr = component_ref_for_mem_expr (t2);
1662
              if (host_integerp (off_tree, 1))
1663
                {
1664
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
1665
                  apply_bitpos = bitpos;
1666
                }
1667
              /* ??? Any reason the field size would be different than
1668
                 the size we got from the type?  */
1669
            }
1670
          else if (flag_argument_noalias > 1
1671
                   && (INDIRECT_REF_P (t2))
1672
                   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1673
            {
1674
              expr = t2;
1675
              offset = NULL;
1676
            }
1677
        }
1678
 
1679
      /* If this is a Fortran indirect argument reference, record the
1680
         parameter decl.  */
1681
      else if (flag_argument_noalias > 1
1682
               && (INDIRECT_REF_P (t))
1683
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1684
        {
1685
          expr = t;
1686
          offset = NULL;
1687
        }
1688
    }
1689
 
1690
  /* If we modified OFFSET based on T, then subtract the outstanding
1691
     bit position offset.  Similarly, increase the size of the accessed
1692
     object to contain the negative offset.  */
1693
  if (apply_bitpos)
1694
    {
1695
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1696
      if (size)
1697
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1698
    }
1699
 
1700
  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1701
    {
1702
      /* Force EXPR and OFFSE to NULL, since we don't know exactly what
1703
         we're overlapping.  */
1704
      offset = NULL;
1705
      expr = NULL;
1706
    }
1707
 
1708
  /* Now set the attributes we computed above.  */
1709
  MEM_ATTRS (ref)
1710
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1711
 
1712
  /* If this is already known to be a scalar or aggregate, we are done.  */
1713
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1714
    return;
1715
 
1716
  /* If it is a reference into an aggregate, this is part of an aggregate.
1717
     Otherwise we don't know.  */
1718
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1719
           || TREE_CODE (t) == ARRAY_RANGE_REF
1720
           || TREE_CODE (t) == BIT_FIELD_REF)
1721
    MEM_IN_STRUCT_P (ref) = 1;
1722
}
1723
 
1724
void
1725
set_mem_attributes (rtx ref, tree t, int objectp)
1726
{
1727
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1728
}
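
/* Usage sketch (hypothetical caller, not part of this file).  When
   expanding a VAR_DECL to RTL, the MEM is created and described first,
   and only then published as DECL_RTL, per the ordering requirement
   asserted above; ADDR here is an assumed, already-valid address:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);
     SET_DECL_RTL (decl, mem);
*/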

/* Set the memory attributes of MEM from REG, using REG's expression and
   offset together with MEM's own size, alignment and alias set.  */

void
set_mem_attrs_from_reg (rtx mem, rtx reg)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
                     GEN_INT (REG_OFFSET (reg)),
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem), align,
                                   GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
                                   GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
                                   GET_MODE (mem));
}
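
/* Usage sketch (hypothetical values): each helper above rebuilds the
   shared mem_attrs record with a single field replaced, so the calls
   compose;

     set_mem_align (mem, 32);
     set_mem_size (mem, GEN_INT (8));

   leaves MEM with 32-bit alignment and an 8-byte size while preserving
   its alias set, expression and offset.  */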
 
1796
/* Return a memory reference like MEMREF, but with its mode changed to MODE
1797
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
1798
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1799
   returned memory location is required to be valid.  The memory
1800
   attributes are not changed.  */
1801
 
1802
static rtx
1803
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1804
{
1805
  rtx new;
1806
 
1807
  gcc_assert (MEM_P (memref));
1808
  if (mode == VOIDmode)
1809
    mode = GET_MODE (memref);
1810
  if (addr == 0)
1811
    addr = XEXP (memref, 0);
1812
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1813
      && (!validate || memory_address_p (mode, addr)))
1814
    return memref;
1815
 
1816
  if (validate)
1817
    {
1818
      if (reload_in_progress || reload_completed)
1819
        gcc_assert (memory_address_p (mode, addr));
1820
      else
1821
        addr = memory_address (mode, addr);
1822
    }
1823
 
1824
  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1825
    return memref;
1826
 
1827
  new = gen_rtx_MEM (mode, addr);
1828
  MEM_COPY_ATTRIBUTES (new, memref);
1829
  return new;
1830
}
1831
 
1832
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1833
   way we are changing MEMREF, so we only preserve the alias set.  */
1834
 
1835
rtx
1836
change_address (rtx memref, enum machine_mode mode, rtx addr)
1837
{
1838
  rtx new = change_address_1 (memref, mode, addr, 1), size;
1839
  enum machine_mode mmode = GET_MODE (new);
1840
  unsigned int align;
1841
 
1842
  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1843
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1844
 
1845
  /* If there are no changes, just return the original memory reference.  */
1846
  if (new == memref)
1847
    {
1848
      if (MEM_ATTRS (memref) == 0
1849
          || (MEM_EXPR (memref) == NULL
1850
              && MEM_OFFSET (memref) == NULL
1851
              && MEM_SIZE (memref) == size
1852
              && MEM_ALIGN (memref) == align))
1853
        return new;
1854
 
1855
      new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1856
      MEM_COPY_ATTRIBUTES (new, memref);
1857
    }
1858
 
1859
  MEM_ATTRS (new)
1860
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1861
 
1862
  return new;
1863
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_p (mode, addr)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
              < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
                               plus_constant (XEXP (addr, 1), offset));
      else
        addr = plus_constant (addr, offset);
    }

  new = change_address_1 (memref, mode, addr, validate);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
             (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
                                   memoffset, size, memalign, GET_MODE (new));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new;
}
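
/* Usage sketch: callers normally go through the adjust_address and
   adjust_address_nv wrapper macros (defined in rtl.h, passing VALIDATE
   and ADJUST as 1 and 1, and 0 and 1, respectively).  For example, taking
   the high word of an assumed DImode reference MEM on a 32-bit target:

     rtx high = adjust_address (mem, SImode, 4);

   The offset, size and alignment attributes of the result are updated as
   computed above.  */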

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new, addr = XEXP (memref, 0);

  new = simplify_gen_binary (PLUS, Pmode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_p (GET_MODE (memref), new)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new);
  new = change_address_1 (memref, VOIDmode, new, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    return new;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
                     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
                     GET_MODE (new));
  return new;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new);
  rtx memoffset = MEM_OFFSET (new);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new == memref)
    return new;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (expr, 1);
          tree offset = component_ref_field_offset (expr);

          if (! DECL_SIZE_UNIT (field))
            {
              expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && INTVAL (memoffset) >= 0)
            break;

          if (! host_integerp (offset, 1))
            {
              expr = NULL_TREE;
              break;
            }

          expr = TREE_OPERAND (expr, 0);
          memoffset
            = (GEN_INT (INTVAL (memoffset)
                        + tree_low_cst (offset, 1)
                        + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                           / BITS_PER_UNIT)));
        }
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
               && DECL_SIZE_UNIT (expr)
               && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
               && (! memoffset || INTVAL (memoffset) >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          expr = NULL_TREE;
          break;
        }
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
                                   MEM_ALIGN (new), mode);

  return new;
}
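
/* Usage sketch (hypothetical target code; MEM is an assumed QImode
   reference).  A target without byte memory operations could widen the
   access and do the byte extraction separately:

     rtx wide = widen_memory_access (mem, SImode, 0);

   Note that the alias set of the result is zapped above, since the wider
   access may touch neighboring objects.  */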

/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
                             NULL, label_num++, NULL);
}
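
/* Usage sketch (hypothetical expander fragment): a label is created once,
   can be referenced before it is placed, and is placed with emit_label:

     rtx label = gen_label_rtx ();
     emit_jump (label);
     ...
     emit_label (label);
*/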

/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (tree fndecl, rtx insn)
{
  tree decl;

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));

  /* Make sure that virtual stack slots are not shared.  */
  unshare_all_decls (DECL_INITIAL (fndecl));

  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        reset_used_flags (LOG_LINKS (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  reset_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    reset_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (cfun->decl, insn);
}

/* Unshare all RTL in the current function.  */

void
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (current_function_decl, get_insns ());
}

struct tree_opt_pass pass_unshare_all_rtl =
{
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
};


/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      return;
      /* SCRATCH must be shared because they represent distinct values.  */
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside a single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

void
verify_rtl_sharing (void)
{
  rtx p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        reset_used_flags (LOG_LINKS (p));
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        verify_rtx_sharing (PATTERN (p), p);
        verify_rtx_sharing (REG_NOTES (p), p);
        verify_rtx_sharing (LOG_LINKS (p), p);
      }
}
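
/* Usage sketch: an RTL pass that worries it may have introduced illegal
   sharing can self-check (with checking enabled this aborts on the first
   shared rtx and dumps the offending insn):

     verify_rtl_sharing ();

   The first loop above clears every used flag so that the second loop
   sees a clean slate; the same two-phase protocol appears in
   unshare_all_rtl_again.  */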

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
      }
}

/* Go through all virtual stack slots of a function and copy any
   shared structure.  */
static void
unshare_all_decls (tree blk)
{
  tree t;

  /* Copy shared decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    unshare_all_decls (t);
}

/* Go through all virtual stack slots of a function and mark them as
   not shared.  */
static void
reset_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      reset_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
    reset_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
        return;
      break;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      rtx copy;

      copy = rtx_alloc (code);
      memcpy (copy, x, RTX_SIZE (code));
      x = copy;
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
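
/* Usage sketch: copy_rtx_if_shared relies on the used flags having been
   cleared beforehand, so the canonical protocol (as in
   unshare_all_rtl_again above) is a reset pass followed by a copy pass:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The deferred last_ptr walk in copy_rtx_if_shared_1 turns the recursion
   on the final operand into the goto-repeat loop, bounding stack depth on
   long operand chains.  */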

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 0;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          reset_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            reset_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any marking
     for them.  */

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 1;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          set_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            set_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}

/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
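
/* Usage sketch (hypothetical expander fragment): before storing into
   TARGET, a value that mentions TARGET is moved into a fresh pseudo so
   the store cannot clobber it; RHS, TARGET and MODE are assumed names:

     rhs = make_safe_from (rhs, target);
     emit_move_insn (target, gen_rtx_PLUS (mode, rhs, const1_rtx));
*/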

/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the first insn of the current sequence or current function.  */

rtx
get_insns (void)
{
  return first_insn;
}

/* Specify a new insn as the first in the chain.  */

void
set_first_insn (rtx insn)
{
  gcc_assert (!PREV_INSN (insn));
  first_insn = insn;
}

/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn (void)
{
  return last_insn;
}

/* Specify a new insn as the last in the chain.  */

void
set_last_insn (rtx insn)
{
  gcc_assert (!NEXT_INSN (insn));
  last_insn = insn;
}

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (last_insn)
    return last_insn;
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = first_insn;

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0, 0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0,
                            XVECLEN (PATTERN (insn), 0) - 1);
        }
    }

  return insn;
}

/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  return cur_insn_uid;
}

/* Renumber instructions so that no instruction UIDs are wasted.  */

void
renumber_insns (FILE *stream)
{
  rtx insn;

  /* If we're not supposed to renumber instructions, don't.  */
  if (!flag_renumber_insns)
    return;

  /* If there aren't that many instructions, then it's not really
     worth renumbering them.  */
  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
    return;

  cur_insn_uid = 1;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (stream)
        fprintf (stream, "Renumbering insn %d to %d\n",
                 INSN_UID (insn), cur_insn_uid);
      INSN_UID (insn) = cur_insn_uid++;
    }
}

/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}
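
/* Note on the scanning helpers: next_nonnote_insn skips only NOTEs, while
   next_real_insn below also skips CODE_LABELs and BARRIERs (anything for
   which INSN_P is false).  A typical walk over the "real" insns:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_real_insn (insn))
       if (INSN_P (insn))
         ...

   here the INSN_P test covers the chain head, which next_real_insn never
   re-examines.  */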

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}

/* Return nonzero if INSN is an insn that really does something: a CALL_INSN,
   a JUMP_INSN, or a nonjump insn whose pattern is not a bare USE or CLOBBER
   once reload has completed.  */

int
active_insn_p (rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as prev_real_insn.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}

#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
                                        REG_NOTES (user));
  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

/* Increment the label uses for all labels present in rtx X.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
3129
 
3130
 
3131
/* Try splitting insns that can be split for better scheduling.
3132
   PAT is the pattern which might split.
3133
   TRIAL is the insn providing PAT.
3134
   LAST is nonzero if we should return the last insn of the sequence produced.
3135
 
3136
   If this routine succeeds in splitting, it returns the first or last
3137
   replacement insn depending on the value of LAST.  Otherwise, it
3138
   returns TRIAL.  If the insn to be returned can be split, it will be.  */
3139
 
3140
rtx
3141
try_split (rtx pat, rtx trial, int last)
3142
{
3143
  rtx before = PREV_INSN (trial);
3144
  rtx after = NEXT_INSN (trial);
3145
  int has_barrier = 0;
3146
  rtx tem;
3147
  rtx note, seq;
3148
  int probability;
3149
  rtx insn_last, insn;
3150
  int njumps = 0;
3151
 
3152
  if (any_condjump_p (trial)
3153
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3154
    split_branch_probability = INTVAL (XEXP (note, 0));
3155
  probability = split_branch_probability;
3156
 
3157
  seq = split_insns (pat, trial);
3158
 
3159
  split_branch_probability = -1;
3160
 
3161
  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3162
     We may need to handle this specially.  */
3163
  if (after && BARRIER_P (after))
3164
    {
3165
      has_barrier = 1;
3166
      after = NEXT_INSN (after);
3167
    }
3168
 
3169
  if (!seq)
3170
    return trial;
3171
 
3172
  /* Avoid infinite loop if any insn of the result matches
3173
     the original pattern.  */
3174
  insn_last = seq;
3175
  while (1)
3176
    {
3177
      if (INSN_P (insn_last)
3178
          && rtx_equal_p (PATTERN (insn_last), pat))
3179
        return trial;
3180
      if (!NEXT_INSN (insn_last))
3181
        break;
3182
      insn_last = NEXT_INSN (insn_last);
3183
    }
3184
 
3185
  /* Mark labels.  */
3186
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3187
    {
3188
      if (JUMP_P (insn))
3189
        {
3190
          mark_jump_label (PATTERN (insn), insn, 0);
3191
          njumps++;
3192
          if (probability != -1
3193
              && any_condjump_p (insn)
3194
              && !find_reg_note (insn, REG_BR_PROB, 0))
3195
            {
3196
              /* We can preserve the REG_BR_PROB notes only if exactly
3197
                 one jump is created, otherwise the machine description
3198
                 is responsible for this step using
3199
                 split_branch_probability variable.  */
3200
              gcc_assert (njumps == 1);
3201
              REG_NOTES (insn)
3202
                = gen_rtx_EXPR_LIST (REG_BR_PROB,
3203
                                     GEN_INT (probability),
3204
                                     REG_NOTES (insn));
3205
            }
3206
        }
3207
    }
3208
 
3209
  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3210
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3211
  if (CALL_P (trial))
3212
    {
3213
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3214
        if (CALL_P (insn))
3215
          {
3216
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3217
            while (*p)
3218
              p = &XEXP (*p, 1);
3219
            *p = CALL_INSN_FUNCTION_USAGE (trial);
3220
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3221
          }
3222
    }
3223
 
3224
  /* Copy notes, particularly those related to the CFG.  */
3225
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3226
    {
3227
      switch (REG_NOTE_KIND (note))
3228
        {
3229
        case REG_EH_REGION:
3230
          insn = insn_last;
3231
          while (insn != NULL_RTX)
3232
            {
3233
              if (CALL_P (insn)
3234
                  || (flag_non_call_exceptions && INSN_P (insn)
3235
                      && may_trap_p (PATTERN (insn))))
3236
                REG_NOTES (insn)
3237
                  = gen_rtx_EXPR_LIST (REG_EH_REGION,
3238
                                       XEXP (note, 0),
3239
                                       REG_NOTES (insn));
3240
              insn = PREV_INSN (insn);
3241
            }
3242
          break;
3243
 
3244
        case REG_NORETURN:
3245
        case REG_SETJMP:
3246
          insn = insn_last;
3247
          while (insn != NULL_RTX)
3248
            {
3249
              if (CALL_P (insn))
3250
                REG_NOTES (insn)
3251
                  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3252
                                       XEXP (note, 0),
3253
                                       REG_NOTES (insn));
3254
              insn = PREV_INSN (insn);
3255
            }
3256
          break;
3257
 
3258
        case REG_NON_LOCAL_GOTO:
3259
          insn = insn_last;
3260
          while (insn != NULL_RTX)
3261
            {
3262
              if (JUMP_P (insn))
3263
                REG_NOTES (insn)
3264
                  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3265
                                       XEXP (note, 0),
3266
                                       REG_NOTES (insn));
3267
              insn = PREV_INSN (insn);
3268
            }
3269
          break;
3270
 
3271
        default:
3272
          break;
3273
        }
3274
    }
3275
 
3276
  /* If there are LABELS inside the split insns increment the
3277
     usage count so we don't delete the label.  */
3278
  if (NONJUMP_INSN_P (trial))
3279
    {
3280
      insn = insn_last;
3281
      while (insn != NULL_RTX)
3282
        {
3283
          if (NONJUMP_INSN_P (insn))
3284
            mark_label_nuses (PATTERN (insn));
3285
 
3286
          insn = PREV_INSN (insn);
3287
        }
3288
    }
3289
 
3290
  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3291
 
3292
  delete_insn (trial);
3293
  if (has_barrier)
3294
    emit_barrier_after (tem);
3295
 
3296
  /* Recursively call try_split for each new insn created; by the
3297
     time control returns here that insn will be fully split, so
3298
     set LAST and continue from the insn after the one returned.
3299
     We can't use next_active_insn here since AFTER may be a note.
3300
     Ignore deleted insns, which can be occur if not optimizing.  */
3301
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3302
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
3303
      tem = try_split (PATTERN (tem), tem, 1);
3304
 
3305
  /* Return either the first or the last insn, depending on which was
3306
     requested.  */
3307
  return last
3308
    ? (after ? PREV_INSN (after) : last_insn)
3309
    : NEXT_INSN (before);
3310
}
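
/* A hedged sketch, not part of the original file: the classic driver
   loop built on try_split.  Walk the whole chain and let try_split
   recurse on whatever each split produces.  The name
   split_all_insns_sketch is hypothetical; the real driver lives in
   recog.c as split_all_insns.  */

static void
split_all_insns_sketch (void)
{
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      /* try_split may delete or replace INSN, so fetch the successor
         before splitting.  */
      next = NEXT_INSN (insn);
      if (INSN_P (insn) && !INSN_DELETED_P (insn))
        /* LAST == 1 asks try_split for the last insn of the result.  */
        try_split (PATTERN (insn), insn, 1);
    }
}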
3311
 
3312
/* Make and return an INSN rtx, initializing all its slots.
3313
   Store PATTERN in the pattern slots.  */
3314
 
3315
rtx
3316
make_insn_raw (rtx pattern)
3317
{
3318
  rtx insn;
3319
 
3320
  insn = rtx_alloc (INSN);
3321
 
3322
  INSN_UID (insn) = cur_insn_uid++;
3323
  PATTERN (insn) = pattern;
3324
  INSN_CODE (insn) = -1;
3325
  LOG_LINKS (insn) = NULL;
3326
  REG_NOTES (insn) = NULL;
3327
  INSN_LOCATOR (insn) = 0;
3328
  BLOCK_FOR_INSN (insn) = NULL;
3329
 
3330
#ifdef ENABLE_RTL_CHECKING
3331
  if (insn
3332
      && INSN_P (insn)
3333
      && (returnjump_p (insn)
3334
          || (GET_CODE (insn) == SET
3335
              && SET_DEST (insn) == pc_rtx)))
3336
    {
3337
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3338
      debug_rtx (insn);
3339
    }
3340
#endif
3341
 
3342
  return insn;
3343
}
3344
 
3345
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3346
 
3347
static rtx
3348
make_jump_insn_raw (rtx pattern)
3349
{
3350
  rtx insn;
3351
 
3352
  insn = rtx_alloc (JUMP_INSN);
3353
  INSN_UID (insn) = cur_insn_uid++;
3354
 
3355
  PATTERN (insn) = pattern;
3356
  INSN_CODE (insn) = -1;
3357
  LOG_LINKS (insn) = NULL;
3358
  REG_NOTES (insn) = NULL;
3359
  JUMP_LABEL (insn) = NULL;
3360
  INSN_LOCATOR (insn) = 0;
3361
  BLOCK_FOR_INSN (insn) = NULL;
3362
 
3363
  return insn;
3364
}
3365
 
3366
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3367
 
3368
static rtx
3369
make_call_insn_raw (rtx pattern)
3370
{
3371
  rtx insn;
3372
 
3373
  insn = rtx_alloc (CALL_INSN);
3374
  INSN_UID (insn) = cur_insn_uid++;
3375
 
3376
  PATTERN (insn) = pattern;
3377
  INSN_CODE (insn) = -1;
3378
  LOG_LINKS (insn) = NULL;
3379
  REG_NOTES (insn) = NULL;
3380
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3381
  INSN_LOCATOR (insn) = 0;
3382
  BLOCK_FOR_INSN (insn) = NULL;
3383
 
3384
  return insn;
3385
}
3386
 
3387
/* Add INSN to the end of the doubly-linked list.
3388
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3389
 
3390
void
3391
add_insn (rtx insn)
3392
{
3393
  PREV_INSN (insn) = last_insn;
3394
  NEXT_INSN (insn) = 0;
3395
 
3396
  if (NULL != last_insn)
3397
    NEXT_INSN (last_insn) = insn;
3398
 
3399
  if (NULL == first_insn)
3400
    first_insn = insn;
3401
 
3402
  last_insn = insn;
3403
}
3404
 
3405
/* Add INSN into the doubly-linked list after insn AFTER.  This and
3406
   the next should be the only functions called to insert an insn once
3407
   delay slots have been filled since only they know how to update a
3408
   SEQUENCE.  */
3409
 
3410
void
3411
add_insn_after (rtx insn, rtx after)
3412
{
3413
  rtx next = NEXT_INSN (after);
3414
  basic_block bb;
3415
 
3416
  gcc_assert (!optimize || !INSN_DELETED_P (after));
3417
 
3418
  NEXT_INSN (insn) = next;
3419
  PREV_INSN (insn) = after;
3420
 
3421
  if (next)
3422
    {
3423
      PREV_INSN (next) = insn;
3424
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3425
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3426
    }
3427
  else if (last_insn == after)
3428
    last_insn = insn;
3429
  else
3430
    {
3431
      struct sequence_stack *stack = seq_stack;
3432
      /* Scan all pending sequences too.  */
3433
      for (; stack; stack = stack->next)
3434
        if (after == stack->last)
3435
          {
3436
            stack->last = insn;
3437
            break;
3438
          }
3439
 
3440
      gcc_assert (stack);
3441
    }
3442
 
3443
  if (!BARRIER_P (after)
3444
      && !BARRIER_P (insn)
3445
      && (bb = BLOCK_FOR_INSN (after)))
3446
    {
3447
      set_block_for_insn (insn, bb);
3448
      if (INSN_P (insn))
3449
        bb->flags |= BB_DIRTY;
3450
      /* Should not happen, as the first insn in the BB is always
3451
         either a NOTE or a LABEL.  */
3452
      if (BB_END (bb) == after
3453
          /* Avoid clobbering of structure when creating new BB.  */
3454
          && !BARRIER_P (insn)
3455
          && (!NOTE_P (insn)
3456
              || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3457
        BB_END (bb) = insn;
3458
    }
3459
 
3460
  NEXT_INSN (after) = insn;
3461
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3462
    {
3463
      rtx sequence = PATTERN (after);
3464
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3465
    }
3466
}
3467
 
3468
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
3469
   the previous should be the only functions called to insert an insn once
3470
   delay slots have been filled since only they know how to update a
3471
   SEQUENCE.  */
3472
 
3473
void
3474
add_insn_before (rtx insn, rtx before)
3475
{
3476
  rtx prev = PREV_INSN (before);
3477
  basic_block bb;
3478
 
3479
  gcc_assert (!optimize || !INSN_DELETED_P (before));
3480
 
3481
  PREV_INSN (insn) = prev;
3482
  NEXT_INSN (insn) = before;
3483
 
3484
  if (prev)
3485
    {
3486
      NEXT_INSN (prev) = insn;
3487
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3488
        {
3489
          rtx sequence = PATTERN (prev);
3490
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3491
        }
3492
    }
3493
  else if (first_insn == before)
3494
    first_insn = insn;
3495
  else
3496
    {
3497
      struct sequence_stack *stack = seq_stack;
3498
      /* Scan all pending sequences too.  */
3499
      for (; stack; stack = stack->next)
3500
        if (before == stack->first)
3501
          {
3502
            stack->first = insn;
3503
            break;
3504
          }
3505
 
3506
      gcc_assert (stack);
3507
    }
3508
 
3509
  if (!BARRIER_P (before)
3510
      && !BARRIER_P (insn)
3511
      && (bb = BLOCK_FOR_INSN (before)))
3512
    {
3513
      set_block_for_insn (insn, bb);
3514
      if (INSN_P (insn))
3515
        bb->flags |= BB_DIRTY;
3516
      /* Should not happen, as the first insn in the BB is always either a
3517
         NOTE or a LABEL.  */
3518
      gcc_assert (BB_HEAD (bb) != insn
3519
                  /* Avoid clobbering of structure when creating new BB.  */
3520
                  || BARRIER_P (insn)
3521
                  || (NOTE_P (insn)
3522
                      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
3523
    }
3524
 
3525
  PREV_INSN (before) = insn;
3526
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3527
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3528
}
3529
 
3530
/* Remove an insn from its doubly-linked list.  This function knows how
3531
   to handle sequences.  */
3532
void
3533
remove_insn (rtx insn)
3534
{
3535
  rtx next = NEXT_INSN (insn);
3536
  rtx prev = PREV_INSN (insn);
3537
  basic_block bb;
3538
 
3539
  if (prev)
3540
    {
3541
      NEXT_INSN (prev) = next;
3542
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3543
        {
3544
          rtx sequence = PATTERN (prev);
3545
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3546
        }
3547
    }
3548
  else if (first_insn == insn)
3549
    first_insn = next;
3550
  else
3551
    {
3552
      struct sequence_stack *stack = seq_stack;
3553
      /* Scan all pending sequences too.  */
3554
      for (; stack; stack = stack->next)
3555
        if (insn == stack->first)
3556
          {
3557
            stack->first = next;
3558
            break;
3559
          }
3560
 
3561
      gcc_assert (stack);
3562
    }
3563
 
3564
  if (next)
3565
    {
3566
      PREV_INSN (next) = prev;
3567
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3568
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3569
    }
3570
  else if (last_insn == insn)
3571
    last_insn = prev;
3572
  else
3573
    {
3574
      struct sequence_stack *stack = seq_stack;
3575
      /* Scan all pending sequences too.  */
3576
      for (; stack; stack = stack->next)
3577
        if (insn == stack->last)
3578
          {
3579
            stack->last = prev;
3580
            break;
3581
          }
3582
 
3583
      gcc_assert (stack);
3584
    }
3585
  if (!BARRIER_P (insn)
3586
      && (bb = BLOCK_FOR_INSN (insn)))
3587
    {
3588
      if (INSN_P (insn))
3589
        bb->flags |= BB_DIRTY;
3590
      if (BB_HEAD (bb) == insn)
3591
        {
3592
          /* Never ever delete the basic block note without deleting the whole
3593
             basic block.  */
3594
          gcc_assert (!NOTE_P (insn));
3595
          BB_HEAD (bb) = next;
3596
        }
3597
      if (BB_END (bb) == insn)
3598
        BB_END (bb) = prev;
3599
    }
3600
}
3601
 
3602
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
3603
 
3604
void
3605
add_function_usage_to (rtx call_insn, rtx call_fusage)
3606
{
3607
  gcc_assert (call_insn && CALL_P (call_insn));
3608
 
3609
  /* Put the register usage information on the CALL.  If there is already
3610
     some usage information, put ours at the end.  */
3611
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
3612
    {
3613
      rtx link;
3614
 
3615
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3616
           link = XEXP (link, 1))
3617
        ;
3618
 
3619
      XEXP (link, 1) = call_fusage;
3620
    }
3621
  else
3622
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3623
}
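
/* Hedged usage sketch: record that a call reads register REG by
   appending a USE to its function-usage list, mirroring what expr.c's
   use_reg builds up before this function is reached.  The helper name
   record_call_use_sketch is hypothetical.  */

static void
record_call_use_sketch (rtx call_insn, rtx reg)
{
  rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_USE (VOIDmode, reg),
                                  NULL_RTX);
  add_function_usage_to (call_insn, fusage);
}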
3624
 
3625
/* Delete all insns made since FROM.
3626
   FROM becomes the new last instruction.  */
3627
 
3628
void
3629
delete_insns_since (rtx from)
3630
{
3631
  if (from == 0)
3632
    first_insn = 0;
3633
  else
3634
    NEXT_INSN (from) = 0;
3635
  last_insn = from;
3636
}
3637
 
3638
/* This function is deprecated; please use sequences instead.
3639
 
3640
   Move a consecutive bunch of insns to a different place in the chain.
3641
   The insns to be moved are those between FROM and TO.
3642
   They are moved to a new position after the insn AFTER.
3643
   AFTER must not be FROM or TO or any insn in between.
3644
 
3645
   This function does not know about SEQUENCEs and hence should not be
3646
   called after delay-slot filling has been done.  */
3647
 
3648
void
3649
reorder_insns_nobb (rtx from, rtx to, rtx after)
3650
{
3651
  /* Splice this bunch out of where it is now.  */
3652
  if (PREV_INSN (from))
3653
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3654
  if (NEXT_INSN (to))
3655
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3656
  if (last_insn == to)
3657
    last_insn = PREV_INSN (from);
3658
  if (first_insn == from)
3659
    first_insn = NEXT_INSN (to);
3660
 
3661
  /* Make the new neighbors point to it and it to them.  */
3662
  if (NEXT_INSN (after))
3663
    PREV_INSN (NEXT_INSN (after)) = to;
3664
 
3665
  NEXT_INSN (to) = NEXT_INSN (after);
3666
  PREV_INSN (from) = after;
3667
  NEXT_INSN (after) = from;
3668
  if (after == last_insn)
3669
    last_insn = to;
3670
}
3671
 
3672
/* Same as the function above, but take care to update BB boundaries.  */
3673
void
3674
reorder_insns (rtx from, rtx to, rtx after)
3675
{
3676
  rtx prev = PREV_INSN (from);
3677
  basic_block bb, bb2;
3678
 
3679
  reorder_insns_nobb (from, to, after);
3680
 
3681
  if (!BARRIER_P (after)
3682
      && (bb = BLOCK_FOR_INSN (after)))
3683
    {
3684
      rtx x;
3685
      bb->flags |= BB_DIRTY;
3686
 
3687
      if (!BARRIER_P (from)
3688
          && (bb2 = BLOCK_FOR_INSN (from)))
3689
        {
3690
          if (BB_END (bb2) == to)
3691
            BB_END (bb2) = prev;
3692
          bb2->flags |= BB_DIRTY;
3693
        }
3694
 
3695
      if (BB_END (bb) == after)
3696
        BB_END (bb) = to;
3697
 
3698
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3699
        if (!BARRIER_P (x))
3700
          set_block_for_insn (x, bb);
3701
    }
3702
}
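
/* Hedged sketch: the single-insn case, FROM == TO, is the common way
   to hoist one instruction; reorder_insns then repairs BB_END in both
   the source and destination blocks.  The wrapper name is
   hypothetical.  */

static void
move_insn_after_sketch (rtx insn, rtx after)
{
  reorder_insns (insn, insn, after);
}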
3703
 
3704
/* Return the line note insn preceding INSN.  */
3705
 
3706
static rtx
3707
find_line_note (rtx insn)
3708
{
3709
  if (no_line_numbers)
3710
    return 0;
3711
 
3712
  for (; insn; insn = PREV_INSN (insn))
3713
    if (NOTE_P (insn)
3714
        && NOTE_LINE_NUMBER (insn) >= 0)
3715
      break;
3716
 
3717
  return insn;
3718
}
3719
 
3720
/* Remove unnecessary notes from the instruction stream.  */
3721
 
3722
void
3723
remove_unnecessary_notes (void)
3724
{
3725
  rtx eh_stack = NULL_RTX;
3726
  rtx insn;
3727
  rtx next;
3728
  rtx tmp;
3729
 
3730
  /* We must not remove the first instruction in the function because
3731
     the compiler depends on the first instruction being a note.  */
3732
  for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3733
    {
3734
      /* Remember what's next.  */
3735
      next = NEXT_INSN (insn);
3736
 
3737
      /* We're only interested in notes.  */
3738
      if (!NOTE_P (insn))
3739
        continue;
3740
 
3741
      switch (NOTE_LINE_NUMBER (insn))
3742
        {
3743
        case NOTE_INSN_DELETED:
3744
          remove_insn (insn);
3745
          break;
3746
 
3747
        case NOTE_INSN_EH_REGION_BEG:
3748
          eh_stack = alloc_INSN_LIST (insn, eh_stack);
3749
          break;
3750
 
3751
        case NOTE_INSN_EH_REGION_END:
3752
          /* Too many end notes.  */
3753
          gcc_assert (eh_stack);
3754
          /* Mismatched nesting.  */
3755
          gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
3756
                      == NOTE_EH_HANDLER (insn));
3757
          tmp = eh_stack;
3758
          eh_stack = XEXP (eh_stack, 1);
3759
          free_INSN_LIST_node (tmp);
3760
          break;
3761
 
3762
        case NOTE_INSN_BLOCK_BEG:
3763
        case NOTE_INSN_BLOCK_END:
3764
          /* BLOCK_END and BLOCK_BEG notes only exist in the `final' pass.  */
3765
          gcc_unreachable ();
3766
 
3767
        default:
3768
          break;
3769
        }
3770
    }
3771
 
3772
  /* Too many EH_REGION_BEG notes.  */
3773
  gcc_assert (!eh_stack);
3774
}
3775
 
3776
struct tree_opt_pass pass_remove_unnecessary_notes =
3777
{
3778
  "eunotes",                            /* name */
3779
  NULL,                                 /* gate */
3780
  remove_unnecessary_notes,             /* execute */
3781
  NULL,                                 /* sub */
3782
  NULL,                                 /* next */
3783
  0,                                    /* static_pass_number */
3784
  0,                                    /* tv_id */
3785
  0,                                    /* properties_required */
3786
  0,                                    /* properties_provided */
3787
  0,                                    /* properties_destroyed */
3788
  0,                                    /* todo_flags_start */
3789
  TODO_dump_func,                       /* todo_flags_finish */
3790
 
3791
};
3792
 
3793
 
3794
/* Emit insn(s) of given code and pattern
3795
   at a specified place within the doubly-linked list.
3796
 
3797
   All of the emit_foo global entry points accept an object
3798
   X which is either an insn list or a PATTERN of a single
3799
   instruction.
3800
 
3801
   There are thus a few canonical ways to generate code and
3802
   emit it at a specific place in the instruction stream.  For
3803
   example, consider the instruction named SPOT and the fact that
3804
   we would like to emit some instructions before SPOT.  We might
3805
   do it like this:
3806
 
3807
        start_sequence ();
3808
        ... emit the new instructions ...
3809
        insns_head = get_insns ();
3810
        end_sequence ();
3811
 
3812
        emit_insn_before (insns_head, SPOT);
3813
 
3814
   It used to be common to generate SEQUENCE rtl instead, but that
3815
   is a relic of the past which no longer occurs.  The reason is that
3816
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
3817
   generated would almost certainly die right after it was created.  */
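
/* A hedged, compilable restatement of the idiom above: package the
   start_sequence/end_sequence dance in a helper that emits a single
   register-to-register move before SPOT.  emit_move_insn comes from
   expr.c; the helper name emit_move_before_sketch is hypothetical.  */

static void
emit_move_before_sketch (rtx dst, rtx src, rtx spot)
{
  rtx insns_head;

  start_sequence ();
  emit_move_insn (dst, src);       /* ... emit the new instructions ...  */
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}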
3818
 
3819
/* Make X be output before the instruction BEFORE.  */
3820
 
3821
rtx
3822
emit_insn_before_noloc (rtx x, rtx before)
3823
{
3824
  rtx last = before;
3825
  rtx insn;
3826
 
3827
  gcc_assert (before);
3828
 
3829
  if (x == NULL_RTX)
3830
    return last;
3831
 
3832
  switch (GET_CODE (x))
3833
    {
3834
    case INSN:
3835
    case JUMP_INSN:
3836
    case CALL_INSN:
3837
    case CODE_LABEL:
3838
    case BARRIER:
3839
    case NOTE:
3840
      insn = x;
3841
      while (insn)
3842
        {
3843
          rtx next = NEXT_INSN (insn);
3844
          add_insn_before (insn, before);
3845
          last = insn;
3846
          insn = next;
3847
        }
3848
      break;
3849
 
3850
#ifdef ENABLE_RTL_CHECKING
3851
    case SEQUENCE:
3852
      gcc_unreachable ();
3853
      break;
3854
#endif
3855
 
3856
    default:
3857
      last = make_insn_raw (x);
3858
      add_insn_before (last, before);
3859
      break;
3860
    }
3861
 
3862
  return last;
3863
}
3864
 
3865
/* Make an instruction with body X and code JUMP_INSN
3866
   and output it before the instruction BEFORE.  */
3867
 
3868
rtx
3869
emit_jump_insn_before_noloc (rtx x, rtx before)
3870
{
3871
  rtx insn, last = NULL_RTX;
3872
 
3873
  gcc_assert (before);
3874
 
3875
  switch (GET_CODE (x))
3876
    {
3877
    case INSN:
3878
    case JUMP_INSN:
3879
    case CALL_INSN:
3880
    case CODE_LABEL:
3881
    case BARRIER:
3882
    case NOTE:
3883
      insn = x;
3884
      while (insn)
3885
        {
3886
          rtx next = NEXT_INSN (insn);
3887
          add_insn_before (insn, before);
3888
          last = insn;
3889
          insn = next;
3890
        }
3891
      break;
3892
 
3893
#ifdef ENABLE_RTL_CHECKING
3894
    case SEQUENCE:
3895
      gcc_unreachable ();
3896
      break;
3897
#endif
3898
 
3899
    default:
3900
      last = make_jump_insn_raw (x);
3901
      add_insn_before (last, before);
3902
      break;
3903
    }
3904
 
3905
  return last;
3906
}
3907
 
3908
/* Make an instruction with body X and code CALL_INSN
3909
   and output it before the instruction BEFORE.  */
3910
 
3911
rtx
3912
emit_call_insn_before_noloc (rtx x, rtx before)
3913
{
3914
  rtx last = NULL_RTX, insn;
3915
 
3916
  gcc_assert (before);
3917
 
3918
  switch (GET_CODE (x))
3919
    {
3920
    case INSN:
3921
    case JUMP_INSN:
3922
    case CALL_INSN:
3923
    case CODE_LABEL:
3924
    case BARRIER:
3925
    case NOTE:
3926
      insn = x;
3927
      while (insn)
3928
        {
3929
          rtx next = NEXT_INSN (insn);
3930
          add_insn_before (insn, before);
3931
          last = insn;
3932
          insn = next;
3933
        }
3934
      break;
3935
 
3936
#ifdef ENABLE_RTL_CHECKING
3937
    case SEQUENCE:
3938
      gcc_unreachable ();
3939
      break;
3940
#endif
3941
 
3942
    default:
3943
      last = make_call_insn_raw (x);
3944
      add_insn_before (last, before);
3945
      break;
3946
    }
3947
 
3948
  return last;
3949
}
3950
 
3951
/* Make an insn of code BARRIER
3952
   and output it before the insn BEFORE.  */
3953
 
3954
rtx
3955
emit_barrier_before (rtx before)
3956
{
3957
  rtx insn = rtx_alloc (BARRIER);
3958
 
3959
  INSN_UID (insn) = cur_insn_uid++;
3960
 
3961
  add_insn_before (insn, before);
3962
  return insn;
3963
}
3964
 
3965
/* Emit the label LABEL before the insn BEFORE.  */
3966
 
3967
rtx
3968
emit_label_before (rtx label, rtx before)
3969
{
3970
  /* This can be called twice for the same label as a result of the
3971
     confusion that follows a syntax error!  So make it harmless.  */
3972
  if (INSN_UID (label) == 0)
3973
    {
3974
      INSN_UID (label) = cur_insn_uid++;
3975
      add_insn_before (label, before);
3976
    }
3977
 
3978
  return label;
3979
}
3980
 
3981
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
3982
 
3983
rtx
3984
emit_note_before (int subtype, rtx before)
3985
{
3986
  rtx note = rtx_alloc (NOTE);
3987
  INSN_UID (note) = cur_insn_uid++;
3988
#ifndef USE_MAPPED_LOCATION
3989
  NOTE_SOURCE_FILE (note) = 0;
3990
#endif
3991
  NOTE_LINE_NUMBER (note) = subtype;
3992
  BLOCK_FOR_INSN (note) = NULL;
3993
 
3994
  add_insn_before (note, before);
3995
  return note;
3996
}
3997
 
3998
/* Helper for emit_insn_after; handles lists of instructions
3999
   efficiently.  */
4000
 
4001
static rtx emit_insn_after_1 (rtx, rtx);
4002
 
4003
static rtx
4004
emit_insn_after_1 (rtx first, rtx after)
4005
{
4006
  rtx last;
4007
  rtx after_after;
4008
  basic_block bb;
4009
 
4010
  if (!BARRIER_P (after)
4011
      && (bb = BLOCK_FOR_INSN (after)))
4012
    {
4013
      bb->flags |= BB_DIRTY;
4014
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4015
        if (!BARRIER_P (last))
4016
          set_block_for_insn (last, bb);
4017
      if (!BARRIER_P (last))
4018
        set_block_for_insn (last, bb);
4019
      if (BB_END (bb) == after)
4020
        BB_END (bb) = last;
4021
    }
4022
  else
4023
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4024
      continue;
4025
 
4026
  after_after = NEXT_INSN (after);
4027
 
4028
  NEXT_INSN (after) = first;
4029
  PREV_INSN (first) = after;
4030
  NEXT_INSN (last) = after_after;
4031
  if (after_after)
4032
    PREV_INSN (after_after) = last;
4033
 
4034
  if (after == last_insn)
4035
    last_insn = last;
4036
  return last;
4037
}
4038
 
4039
/* Make X be output after the insn AFTER.  */
4040
 
4041
rtx
4042
emit_insn_after_noloc (rtx x, rtx after)
4043
{
4044
  rtx last = after;
4045
 
4046
  gcc_assert (after);
4047
 
4048
  if (x == NULL_RTX)
4049
    return last;
4050
 
4051
  switch (GET_CODE (x))
4052
    {
4053
    case INSN:
4054
    case JUMP_INSN:
4055
    case CALL_INSN:
4056
    case CODE_LABEL:
4057
    case BARRIER:
4058
    case NOTE:
4059
      last = emit_insn_after_1 (x, after);
4060
      break;
4061
 
4062
#ifdef ENABLE_RTL_CHECKING
4063
    case SEQUENCE:
4064
      gcc_unreachable ();
4065
      break;
4066
#endif
4067
 
4068
    default:
4069
      last = make_insn_raw (x);
4070
      add_insn_after (last, after);
4071
      break;
4072
    }
4073
 
4074
  return last;
4075
}
4076
 
4077
/* Similar to emit_insn_after, except that line notes are to be inserted so
4078
   as to act as if this insn were at FROM.  */
4079
 
4080
void
4081
emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
4082
{
4083
  rtx from_line = find_line_note (from);
4084
  rtx after_line = find_line_note (after);
4085
  rtx insn = emit_insn_after (x, after);
4086
 
4087
  if (from_line)
4088
    emit_note_copy_after (from_line, after);
4089
 
4090
  if (after_line)
4091
    emit_note_copy_after (after_line, insn);
4092
}
4093
 
4094
/* Make an insn of code JUMP_INSN with body X
4095
   and output it after the insn AFTER.  */
4096
 
4097
rtx
4098
emit_jump_insn_after_noloc (rtx x, rtx after)
4099
{
4100
  rtx last;
4101
 
4102
  gcc_assert (after);
4103
 
4104
  switch (GET_CODE (x))
4105
    {
4106
    case INSN:
4107
    case JUMP_INSN:
4108
    case CALL_INSN:
4109
    case CODE_LABEL:
4110
    case BARRIER:
4111
    case NOTE:
4112
      last = emit_insn_after_1 (x, after);
4113
      break;
4114
 
4115
#ifdef ENABLE_RTL_CHECKING
4116
    case SEQUENCE:
4117
      gcc_unreachable ();
4118
      break;
4119
#endif
4120
 
4121
    default:
4122
      last = make_jump_insn_raw (x);
4123
      add_insn_after (last, after);
4124
      break;
4125
    }
4126
 
4127
  return last;
4128
}
4129
 
4130
/* Make an instruction with body X and code CALL_INSN
4131
   and output it after the instruction AFTER.  */
4132
 
4133
rtx
4134
emit_call_insn_after_noloc (rtx x, rtx after)
4135
{
4136
  rtx last;
4137
 
4138
  gcc_assert (after);
4139
 
4140
  switch (GET_CODE (x))
4141
    {
4142
    case INSN:
4143
    case JUMP_INSN:
4144
    case CALL_INSN:
4145
    case CODE_LABEL:
4146
    case BARRIER:
4147
    case NOTE:
4148
      last = emit_insn_after_1 (x, after);
4149
      break;
4150
 
4151
#ifdef ENABLE_RTL_CHECKING
4152
    case SEQUENCE:
4153
      gcc_unreachable ();
4154
      break;
4155
#endif
4156
 
4157
    default:
4158
      last = make_call_insn_raw (x);
4159
      add_insn_after (last, after);
4160
      break;
4161
    }
4162
 
4163
  return last;
4164
}
4165
 
4166
/* Make an insn of code BARRIER
4167
   and output it after the insn AFTER.  */
4168
 
4169
rtx
4170
emit_barrier_after (rtx after)
4171
{
4172
  rtx insn = rtx_alloc (BARRIER);
4173
 
4174
  INSN_UID (insn) = cur_insn_uid++;
4175
 
4176
  add_insn_after (insn, after);
4177
  return insn;
4178
}
4179
 
4180
/* Emit the label LABEL after the insn AFTER.  */
4181
 
4182
rtx
4183
emit_label_after (rtx label, rtx after)
4184
{
4185
  /* This can be called twice for the same label
4186
     as a result of the confusion that follows a syntax error!
4187
     So make it harmless.  */
4188
  if (INSN_UID (label) == 0)
4189
    {
4190
      INSN_UID (label) = cur_insn_uid++;
4191
      add_insn_after (label, after);
4192
    }
4193
 
4194
  return label;
4195
}
4196
 
4197
/* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4198
 
4199
rtx
4200
emit_note_after (int subtype, rtx after)
4201
{
4202
  rtx note = rtx_alloc (NOTE);
4203
  INSN_UID (note) = cur_insn_uid++;
4204
#ifndef USE_MAPPED_LOCATION
4205
  NOTE_SOURCE_FILE (note) = 0;
4206
#endif
4207
  NOTE_LINE_NUMBER (note) = subtype;
4208
  BLOCK_FOR_INSN (note) = NULL;
4209
  add_insn_after (note, after);
4210
  return note;
4211
}
4212
 
4213
/* Emit a copy of note ORIG after the insn AFTER.  */
4214
 
4215
rtx
4216
emit_note_copy_after (rtx orig, rtx after)
4217
{
4218
  rtx note;
4219
 
4220
  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4221
    {
4222
      cur_insn_uid++;
4223
      return 0;
4224
    }
4225
 
4226
  note = rtx_alloc (NOTE);
4227
  INSN_UID (note) = cur_insn_uid++;
4228
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4229
  NOTE_DATA (note) = NOTE_DATA (orig);
4230
  BLOCK_FOR_INSN (note) = NULL;
4231
  add_insn_after (note, after);
4232
  return note;
4233
}
4234
 
4235
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4236
rtx
4237
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4238
{
4239
  rtx last = emit_insn_after_noloc (pattern, after);
4240
 
4241
  if (pattern == NULL_RTX || !loc)
4242
    return last;
4243
 
4244
  after = NEXT_INSN (after);
4245
  while (1)
4246
    {
4247
      if (active_insn_p (after) && !INSN_LOCATOR (after))
4248
        INSN_LOCATOR (after) = loc;
4249
      if (after == last)
4250
        break;
4251
      after = NEXT_INSN (after);
4252
    }
4253
  return last;
4254
}
4255
 
4256
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4257
rtx
4258
emit_insn_after (rtx pattern, rtx after)
4259
{
4260
  if (INSN_P (after))
4261
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4262
  else
4263
    return emit_insn_after_noloc (pattern, after);
4264
}
4265
 
4266
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4267
rtx
4268
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4269
{
4270
  rtx last = emit_jump_insn_after_noloc (pattern, after);
4271
 
4272
  if (pattern == NULL_RTX || !loc)
4273
    return last;
4274
 
4275
  after = NEXT_INSN (after);
4276
  while (1)
4277
    {
4278
      if (active_insn_p (after) && !INSN_LOCATOR (after))
4279
        INSN_LOCATOR (after) = loc;
4280
      if (after == last)
4281
        break;
4282
      after = NEXT_INSN (after);
4283
    }
4284
  return last;
4285
}
4286
 
4287
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4288
rtx
4289
emit_jump_insn_after (rtx pattern, rtx after)
4290
{
4291
  if (INSN_P (after))
4292
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4293
  else
4294
    return emit_jump_insn_after_noloc (pattern, after);
4295
}
4296
 
4297
/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
4298
rtx
4299
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4300
{
4301
  rtx last = emit_call_insn_after_noloc (pattern, after);
4302
 
4303
  if (pattern == NULL_RTX || !loc)
4304
    return last;
4305
 
4306
  after = NEXT_INSN (after);
4307
  while (1)
4308
    {
4309
      if (active_insn_p (after) && !INSN_LOCATOR (after))
4310
        INSN_LOCATOR (after) = loc;
4311
      if (after == last)
4312
        break;
4313
      after = NEXT_INSN (after);
4314
    }
4315
  return last;
4316
}
4317
 
4318
/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4319
rtx
4320
emit_call_insn_after (rtx pattern, rtx after)
4321
{
4322
  if (INSN_P (after))
4323
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4324
  else
4325
    return emit_call_insn_after_noloc (pattern, after);
4326
}
4327
 
4328
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4329
rtx
4330
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4331
{
4332
  rtx first = PREV_INSN (before);
4333
  rtx last = emit_insn_before_noloc (pattern, before);
4334
 
4335
  if (pattern == NULL_RTX || !loc)
4336
    return last;
4337
 
4338
  first = NEXT_INSN (first);
4339
  while (1)
4340
    {
4341
      if (active_insn_p (first) && !INSN_LOCATOR (first))
4342
        INSN_LOCATOR (first) = loc;
4343
      if (first == last)
4344
        break;
4345
      first = NEXT_INSN (first);
4346
    }
4347
  return last;
4348
}
4349
 
4350
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4351
rtx
4352
emit_insn_before (rtx pattern, rtx before)
4353
{
4354
  if (INSN_P (before))
4355
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4356
  else
4357
    return emit_insn_before_noloc (pattern, before);
4358
}
4359
 
4360
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4361
rtx
4362
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4363
{
4364
  rtx first = PREV_INSN (before);
4365
  rtx last = emit_jump_insn_before_noloc (pattern, before);
4366
 
4367
  if (pattern == NULL_RTX)
4368
    return last;
4369
 
4370
  first = NEXT_INSN (first);
4371
  while (1)
4372
    {
4373
      if (active_insn_p (first) && !INSN_LOCATOR (first))
4374
        INSN_LOCATOR (first) = loc;
4375
      if (first == last)
4376
        break;
4377
      first = NEXT_INSN (first);
4378
    }
4379
  return last;
4380
}
4381
 
4382
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4383
rtx
4384
emit_jump_insn_before (rtx pattern, rtx before)
4385
{
4386
  if (INSN_P (before))
4387
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4388
  else
4389
    return emit_jump_insn_before_noloc (pattern, before);
4390
}
4391
 
4392
/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
4393
rtx
4394
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4395
{
4396
  rtx first = PREV_INSN (before);
4397
  rtx last = emit_call_insn_before_noloc (pattern, before);
4398
 
4399
  if (pattern == NULL_RTX)
4400
    return last;
4401
 
4402
  first = NEXT_INSN (first);
4403
  while (1)
4404
    {
4405
      if (active_insn_p (first) && !INSN_LOCATOR (first))
4406
        INSN_LOCATOR (first) = loc;
4407
      if (first == last)
4408
        break;
4409
      first = NEXT_INSN (first);
4410
    }
4411
  return last;
4412
}
4413
 
4414
/* Like emit_call_insn_before_noloc,
4415
   but set INSN_LOCATOR according to BEFORE.  */
4416
rtx
4417
emit_call_insn_before (rtx pattern, rtx before)
4418
{
4419
  if (INSN_P (before))
4420
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4421
  else
4422
    return emit_call_insn_before_noloc (pattern, before);
4423
}
4424
 
4425
/* Take X and emit it at the end of the doubly-linked
4426
   INSN list.
4427
 
4428
   Returns the last insn emitted.  */
4429
 
4430
rtx
4431
emit_insn (rtx x)
4432
{
4433
  rtx last = last_insn;
4434
  rtx insn;
4435
 
4436
  if (x == NULL_RTX)
4437
    return last;
4438
 
4439
  switch (GET_CODE (x))
4440
    {
4441
    case INSN:
4442
    case JUMP_INSN:
4443
    case CALL_INSN:
4444
    case CODE_LABEL:
4445
    case BARRIER:
4446
    case NOTE:
4447
      insn = x;
4448
      while (insn)
4449
        {
4450
          rtx next = NEXT_INSN (insn);
4451
          add_insn (insn);
4452
          last = insn;
4453
          insn = next;
4454
        }
4455
      break;
4456
 
4457
#ifdef ENABLE_RTL_CHECKING
4458
    case SEQUENCE:
4459
      gcc_unreachable ();
4460
      break;
4461
#endif
4462
 
4463
    default:
4464
      last = make_insn_raw (x);
4465
      add_insn (last);
4466
      break;
4467
    }
4468
 
4469
  return last;
4470
}
4471
 
4472
/* Make an insn of code JUMP_INSN with pattern X
4473
   and add it to the end of the doubly-linked list.  */
4474
 
4475
rtx
4476
emit_jump_insn (rtx x)
4477
{
4478
  rtx last = NULL_RTX, insn;
4479
 
4480
  switch (GET_CODE (x))
4481
    {
4482
    case INSN:
4483
    case JUMP_INSN:
4484
    case CALL_INSN:
4485
    case CODE_LABEL:
4486
    case BARRIER:
4487
    case NOTE:
4488
      insn = x;
4489
      while (insn)
4490
        {
4491
          rtx next = NEXT_INSN (insn);
4492
          add_insn (insn);
4493
          last = insn;
4494
          insn = next;
4495
        }
4496
      break;
4497
 
4498
#ifdef ENABLE_RTL_CHECKING
4499
    case SEQUENCE:
4500
      gcc_unreachable ();
4501
      break;
4502
#endif
4503
 
4504
    default:
4505
      last = make_jump_insn_raw (x);
4506
      add_insn (last);
4507
      break;
4508
    }
4509
 
4510
  return last;
4511
}
4512
 
4513
/* Make an insn of code CALL_INSN with pattern X
4514
   and add it to the end of the doubly-linked list.  */
4515
 
4516
rtx
4517
emit_call_insn (rtx x)
4518
{
4519
  rtx insn;
4520
 
4521
  switch (GET_CODE (x))
4522
    {
4523
    case INSN:
4524
    case JUMP_INSN:
4525
    case CALL_INSN:
4526
    case CODE_LABEL:
4527
    case BARRIER:
4528
    case NOTE:
4529
      insn = emit_insn (x);
4530
      break;
4531
 
4532
#ifdef ENABLE_RTL_CHECKING
4533
    case SEQUENCE:
4534
      gcc_unreachable ();
4535
      break;
4536
#endif
4537
 
4538
    default:
4539
      insn = make_call_insn_raw (x);
4540
      add_insn (insn);
4541
      break;
4542
    }
4543
 
4544
  return insn;
4545
}
4546
 
4547
/* Add the label LABEL to the end of the doubly-linked list.  */
4548
 
4549
rtx
4550
emit_label (rtx label)
4551
{
4552
  /* This can be called twice for the same label
4553
     as a result of the confusion that follows a syntax error!
4554
     So make it harmless.  */
4555
  if (INSN_UID (label) == 0)
4556
    {
4557
      INSN_UID (label) = cur_insn_uid++;
4558
      add_insn (label);
4559
    }
4560
  return label;
4561
}
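
/* Hedged sketch of the usual pairing: create a label with
   gen_label_rtx, reference it from a branch, and emit it into the
   chain later; emit_label's INSN_UID check makes an accidental second
   emission harmless.  The helper name is hypothetical.  */

static rtx
emit_fresh_label_sketch (void)
{
  rtx label = gen_label_rtx ();

  /* ... emit a conditional branch to LABEL here ...  */

  emit_label (label);
  return label;
}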
4562
 
4563
/* Make an insn of code BARRIER
4564
   and add it to the end of the doubly-linked list.  */
4565
 
4566
rtx
4567
emit_barrier (void)
4568
{
4569
  rtx barrier = rtx_alloc (BARRIER);
4570
  INSN_UID (barrier) = cur_insn_uid++;
4571
  add_insn (barrier);
4572
  return barrier;
4573
}
4574
 
4575
/* Make a line-number NOTE insn for LOCATION and add it to the end
4576
   of the doubly-linked list, but only if line numbers are desired for
4577
   debugging info and it doesn't match the previous one.  */
4578
 
4579
rtx
4580
emit_line_note (location_t location)
4581
{
4582
  rtx note;
4583
 
4584
#ifdef USE_MAPPED_LOCATION
4585
  if (location == last_location)
4586
    return NULL_RTX;
4587
#else
4588
  if (location.file && last_location.file
4589
      && !strcmp (location.file, last_location.file)
4590
      && location.line == last_location.line)
4591
    return NULL_RTX;
4592
#endif
4593
  last_location = location;
4594
 
4595
  if (no_line_numbers)
4596
    {
4597
      cur_insn_uid++;
4598
      return NULL_RTX;
4599
    }
4600
 
4601
#ifdef USE_MAPPED_LOCATION
4602
  note = emit_note ((int) location);
4603
#else
4604
  note = emit_note (location.line);
4605
  NOTE_SOURCE_FILE (note) = location.file;
4606
#endif
4607
 
4608
  return note;
4609
}
4610
 
4611
/* Emit a copy of note ORIG.  */
4612
 
4613
rtx
4614
emit_note_copy (rtx orig)
4615
{
4616
  rtx note;
4617
 
4618
  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
4619
    {
4620
      cur_insn_uid++;
4621
      return NULL_RTX;
4622
    }
4623
 
4624
  note = rtx_alloc (NOTE);
4625
 
4626
  INSN_UID (note) = cur_insn_uid++;
4627
  NOTE_DATA (note) = NOTE_DATA (orig);
4628
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
4629
  BLOCK_FOR_INSN (note) = NULL;
4630
  add_insn (note);
4631
 
4632
  return note;
4633
}
4634
 
4635
/* Make an insn of code NOTE with subtype NOTE_NO
4636
   and add it to the end of the doubly-linked list.  */
4637
 
4638
rtx
4639
emit_note (int note_no)
4640
{
4641
  rtx note;
4642
 
4643
  note = rtx_alloc (NOTE);
4644
  INSN_UID (note) = cur_insn_uid++;
4645
  NOTE_LINE_NUMBER (note) = note_no;
4646
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4647
  BLOCK_FOR_INSN (note) = NULL;
4648
  add_insn (note);
4649
  return note;
4650
}
4651
 
4652
/* Cause next statement to emit a line note even if the line number
4653
   has not changed.  */
4654
 
4655
void
4656
force_next_line_note (void)
4657
{
4658
#ifdef USE_MAPPED_LOCATION
4659
  last_location = -1;
4660
#else
4661
  last_location.line = -1;
4662
#endif
4663
}
4664
 
4665
/* Place a note of KIND on insn INSN with DATUM as the datum. If a
4666
   note of this type already exists, remove it first.  */
4667
 
4668
rtx
4669
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4670
{
4671
  rtx note = find_reg_note (insn, kind, NULL_RTX);
4672
 
4673
  switch (kind)
4674
    {
4675
    case REG_EQUAL:
4676
    case REG_EQUIV:
4677
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4678
         has multiple sets (some callers assume single_set
4679
         means the insn only has one set, when in fact it
4680
         means the insn only has one *useful* set).  */
4681
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4682
        {
4683
          gcc_assert (!note);
4684
          return NULL_RTX;
4685
        }
4686
 
4687
      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4688
         It serves no useful purpose and breaks eliminate_regs.  */
4689
      if (GET_CODE (datum) == ASM_OPERANDS)
4690
        return NULL_RTX;
4691
      break;
4692
 
4693
    default:
4694
      break;
4695
    }
4696
 
4697
  if (note)
4698
    {
4699
      XEXP (note, 0) = datum;
4700
      return note;
4701
    }
4702
 
4703
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4704
  return REG_NOTES (insn);
4705
}
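
/* Hedged usage sketch: record that INSN's single set computes EXPR,
   relying on set_unique_reg_note to overwrite any stale REG_EQUAL
   note instead of accumulating duplicates.  The wrapper name is
   hypothetical.  */

static void
note_insn_value_sketch (rtx insn, rtx expr)
{
  set_unique_reg_note (insn, REG_EQUAL, expr);
}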
4706
 
4707
/* Return an indication of which type of insn should have X as a body.
4708
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
4709
 
4710
static enum rtx_code
4711
classify_insn (rtx x)
4712
{
4713
  if (LABEL_P (x))
4714
    return CODE_LABEL;
4715
  if (GET_CODE (x) == CALL)
4716
    return CALL_INSN;
4717
  if (GET_CODE (x) == RETURN)
4718
    return JUMP_INSN;
4719
  if (GET_CODE (x) == SET)
4720
    {
4721
      if (SET_DEST (x) == pc_rtx)
4722
        return JUMP_INSN;
4723
      else if (GET_CODE (SET_SRC (x)) == CALL)
4724
        return CALL_INSN;
4725
      else
4726
        return INSN;
4727
    }
4728
  if (GET_CODE (x) == PARALLEL)
4729
    {
4730
      int j;
4731
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4732
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4733
          return CALL_INSN;
4734
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4735
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4736
          return JUMP_INSN;
4737
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4738
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4739
          return CALL_INSN;
4740
    }
4741
  return INSN;
4742
}
4743
 
4744
/* Emit the rtl pattern X as an appropriate kind of insn.
4745
   If X is a label, it is simply added into the insn chain.  */
4746
 
4747
rtx
4748
emit (rtx x)
4749
{
4750
  enum rtx_code code = classify_insn (x);
4751
 
4752
  switch (code)
4753
    {
4754
    case CODE_LABEL:
4755
      return emit_label (x);
4756
    case INSN:
4757
      return emit_insn (x);
4758
    case  JUMP_INSN:
4759
      {
4760
        rtx insn = emit_jump_insn (x);
4761
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4762
          return emit_barrier ();
4763
        return insn;
4764
      }
4765
    case CALL_INSN:
4766
      return emit_call_insn (x);
4767
    default:
4768
      gcc_unreachable ();
4769
    }
4770
}
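
/* Hedged illustration of classify_insn driving emit's dispatch: an
   ordinary SET becomes an INSN, while a SET whose destination is the
   pc becomes a JUMP_INSN (and, being an unconditional jump to a
   label, is followed by a barrier).  LABEL is assumed to be a
   CODE_LABEL already in the chain; setting JUMP_LABEL is left to the
   caller, and the function name is hypothetical.  */

static void
emit_dispatch_sketch (rtx label)
{
  /* (set (reg) (const_int 1)) -> INSN.  */
  emit (gen_rtx_SET (VOIDmode, gen_reg_rtx (SImode), const1_rtx));

  /* (set (pc) (label_ref LABEL)) -> JUMP_INSN.  */
  emit (gen_rtx_SET (VOIDmode, pc_rtx,
                     gen_rtx_LABEL_REF (VOIDmode, label)));
}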
4771
 
4772
/* Space for free sequence stack entries.  */
4773
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4774
 
4775
/* Begin emitting insns to a sequence.  If this sequence will contain
4776
   something that might cause the compiler to pop arguments to function
4777
   calls (because those pops have previously been deferred; see
4778
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4779
   before calling this function.  That will ensure that the deferred
4780
   pops are not accidentally emitted in the middle of this sequence.  */
4781
 
4782
void
4783
start_sequence (void)
4784
{
4785
  struct sequence_stack *tem;
4786
 
4787
  if (free_sequence_stack != NULL)
4788
    {
4789
      tem = free_sequence_stack;
4790
      free_sequence_stack = tem->next;
4791
    }
4792
  else
4793
    tem = ggc_alloc (sizeof (struct sequence_stack));
4794
 
4795
  tem->next = seq_stack;
4796
  tem->first = first_insn;
4797
  tem->last = last_insn;
4798
 
4799
  seq_stack = tem;
4800
 
4801
  first_insn = 0;
4802
  last_insn = 0;
4803
}
4804
 
4805
/* Set up the insn chain starting with FIRST as the current sequence,
4806
   saving the previously current one.  See the documentation for
4807
   start_sequence for more information about how to use this function.  */
4808
 
4809
void
4810
push_to_sequence (rtx first)
4811
{
4812
  rtx last;
4813
 
4814
  start_sequence ();
4815
 
4816
  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4817
 
4818
  first_insn = first;
4819
  last_insn = last;
4820
}
4821
 
4822
/* Set up the outer-level insn chain
4823
   as the current sequence, saving the previously current one.  */
4824
 
4825
void
4826
push_topmost_sequence (void)
4827
{
4828
  struct sequence_stack *stack, *top = NULL;
4829
 
4830
  start_sequence ();
4831
 
4832
  for (stack = seq_stack; stack; stack = stack->next)
4833
    top = stack;
4834
 
4835
  first_insn = top->first;
4836
  last_insn = top->last;
4837
}
4838
 
4839
/* After emitting to the outer-level insn chain, update the outer-level
4840
   insn chain, and restore the previous saved state.  */
4841
 
4842
void
4843
pop_topmost_sequence (void)
4844
{
4845
  struct sequence_stack *stack, *top = NULL;
4846
 
4847
  for (stack = seq_stack; stack; stack = stack->next)
4848
    top = stack;
4849
 
4850
  top->first = first_insn;
4851
  top->last = last_insn;
4852
 
4853
  end_sequence ();
4854
}
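
/* Hedged sketch of the push/pop pairing: temporarily switch to the
   function's outermost insn chain, emit there, and restore whatever
   nested sequence was active -- the idiom used when insns must be
   added outside the current sequence.  The helper name is
   hypothetical.  */

static void
emit_on_outer_chain_sketch (rtx pattern)
{
  push_topmost_sequence ();
  emit_insn (pattern);
  pop_topmost_sequence ();
}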
4855
 
4856
/* After emitting to a sequence, restore previous saved state.
4857
 
4858
   To get the contents of the sequence just made, you must call
4859
   `get_insns' *before* calling here.
4860
 
4861
   If the compiler might have deferred popping arguments while
4862
   generating this sequence, and this sequence will not be immediately
4863
   inserted into the instruction stream, use do_pending_stack_adjust
4864
   before calling get_insns.  That will ensure that the deferred
4865
   pops are inserted into this sequence, and not into some random
4866
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
4867
   information about deferred popping of arguments.  */
4868
 
4869
void
4870
end_sequence (void)
4871
{
4872
  struct sequence_stack *tem = seq_stack;
4873
 
4874
  first_insn = tem->first;
4875
  last_insn = tem->last;
4876
  seq_stack = tem->next;
4877
 
4878
  memset (tem, 0, sizeof (*tem));
4879
  tem->next = free_sequence_stack;
4880
  free_sequence_stack = tem;
4881
}
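
/* Hedged sketch of the protocol spelled out above: flush deferred
   stack pops into the sequence, capture it with get_insns *before*
   end_sequence, and hand the detached chain back to the caller.
   do_pending_stack_adjust is declared in expr.h; the helper name is
   hypothetical.  */

static rtx
capture_sequence_sketch (void)
{
  rtx seq;

  start_sequence ();
  /* ... emit insns here ...  */
  do_pending_stack_adjust ();
  seq = get_insns ();
  end_sequence ();

  return seq;
}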
4882
 
4883
/* Return 1 if currently emitting into a sequence.  */
4884
 
4885
int
4886
in_sequence_p (void)
4887
{
4888
  return seq_stack != 0;
4889
}
4890
 
4891
/* Put the various virtual registers into REGNO_REG_RTX.  */
4892
 
4893
void
4894
init_virtual_regs (struct emit_status *es)
4895
{
4896
  rtx *ptr = es->x_regno_reg_rtx;
4897
  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4898
  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4899
  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4900
  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4901
  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4902
}
4903
 
4904
 
4905
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
4906
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4907
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4908
static int copy_insn_n_scratches;
4909
 
4910
/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4911
   copied an ASM_OPERANDS.
4912
   In that case, it is the original input-operand vector.  */
4913
static rtvec orig_asm_operands_vector;
4914
 
4915
/* When an insn is being copied by copy_insn_1, this is nonzero if we have
4916
   copied an ASM_OPERANDS.
4917
   In that case, it is the copied input-operand vector.  */
4918
static rtvec copy_asm_operands_vector;
4919
 
4920
/* Likewise for the constraints vector.  */
4921
static rtvec orig_asm_constraints_vector;
4922
static rtvec copy_asm_constraints_vector;
4923
 
4924
/* Recursively create a new copy of an rtx for copy_insn.
4925
   This function differs from copy_rtx in that it handles SCRATCHes and
4926
   ASM_OPERANDs properly.
4927
   Normally, this function is not used directly; use copy_insn as front end.
4928
   However, you could first copy an insn pattern with copy_insn and then use
4929
   this function afterwards to properly copy any REG_NOTEs containing
4930
   SCRATCHes.  */
4931
 
4932
rtx
4933
copy_insn_1 (rtx orig)
4934
{
4935
  rtx copy;
4936
  int i, j;
4937
  RTX_CODE code;
4938
  const char *format_ptr;
4939
 
4940
  code = GET_CODE (orig);
4941
 
4942
  switch (code)
4943
    {
4944
    case REG:
4945
    case CONST_INT:
4946
    case CONST_DOUBLE:
4947
    case CONST_VECTOR:
4948
    case SYMBOL_REF:
4949
    case CODE_LABEL:
4950
    case PC:
4951
    case CC0:
4952
      return orig;
4953
    case CLOBBER:
4954
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4955
        return orig;
4956
      break;
4957
 
4958
    case SCRATCH:
4959
      for (i = 0; i < copy_insn_n_scratches; i++)
4960
        if (copy_insn_scratch_in[i] == orig)
4961
          return copy_insn_scratch_out[i];
4962
      break;
4963
 
4964
    case CONST:
4965
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
4966
         a LABEL_REF, it isn't sharable.  */
4967
      if (GET_CODE (XEXP (orig, 0)) == PLUS
4968
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4969
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4970
        return orig;
4971
      break;
4972
 
4973
      /* A MEM with a constant address is not sharable.  The problem is that
4974
         the constant address may need to be reloaded.  If the mem is shared,
4975
         then reloading one copy of this mem will cause all copies to appear
4976
         to have been reloaded.  */
4977
 
4978
    default:
4979
      break;
4980
    }
4981
 
4982
  copy = rtx_alloc (code);
4983
 
4984
  /* Copy the various flags, and other information.  We assume that
4985
     all fields need copying, and then clear the fields that should
4986
     not be copied.  That is the sensible default behavior, and forces
4987
     us to explicitly document why we are *not* copying a flag.  */
4988
  memcpy (copy, orig, RTX_HDR_SIZE);
4989
 
4990
  /* We do not copy the USED flag, which is used as a mark bit during
4991
     walks over the RTL.  */
4992
  RTX_FLAG (copy, used) = 0;
4993
 
4994
  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
4995
  if (INSN_P (orig))
4996
    {
4997
      RTX_FLAG (copy, jump) = 0;
4998
      RTX_FLAG (copy, call) = 0;
4999
      RTX_FLAG (copy, frame_related) = 0;
5000
    }
5001
 
5002
  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5003
 
5004
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5005
    {
5006
      copy->u.fld[i] = orig->u.fld[i];
5007
      switch (*format_ptr++)
5008
        {
5009
        case 'e':
5010
          if (XEXP (orig, i) != NULL)
5011
            XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5012
          break;
5013
 
5014
        case 'E':
5015
        case 'V':
5016
          if (XVEC (orig, i) == orig_asm_constraints_vector)
5017
            XVEC (copy, i) = copy_asm_constraints_vector;
5018
          else if (XVEC (orig, i) == orig_asm_operands_vector)
5019
            XVEC (copy, i) = copy_asm_operands_vector;
5020
          else if (XVEC (orig, i) != NULL)
5021
            {
5022
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5023
              for (j = 0; j < XVECLEN (copy, i); j++)
5024
                XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5025
            }
5026
          break;
5027
 
5028
        case 't':
5029
        case 'w':
5030
        case 'i':
5031
        case 's':
5032
        case 'S':
5033
        case 'u':
5034
        case '0':
5035
          /* These are left unchanged.  */
5036
          break;
5037
 
5038
        default:
5039
          gcc_unreachable ();
5040
        }
5041
    }
5042
 
5043
  if (code == SCRATCH)
5044
    {
5045
      i = copy_insn_n_scratches++;
5046
      gcc_assert (i < MAX_RECOG_OPERANDS);
5047
      copy_insn_scratch_in[i] = orig;
5048
      copy_insn_scratch_out[i] = copy;
5049
    }
5050
  else if (code == ASM_OPERANDS)
5051
    {
5052
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5053
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5054
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5055
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5056
    }
5057
 
5058
  return copy;
5059
}
5060
 
5061
/* Create a new copy of an rtx.
5062
   This function differs from copy_rtx in that it handles SCRATCHes and
5063
   ASM_OPERANDs properly.
5064
   INSN doesn't really have to be a full INSN; it could be just the
5065
   pattern.  */
5066
rtx
5067
copy_insn (rtx insn)
5068
{
5069
  copy_insn_n_scratches = 0;
5070
  orig_asm_operands_vector = 0;
5071
  orig_asm_constraints_vector = 0;
5072
  copy_asm_operands_vector = 0;
5073
  copy_asm_constraints_vector = 0;
5074
  return copy_insn_1 (insn);
5075
}
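
/* Hedged sketch of the usage described above copy_insn_1: copy the
   pattern first, then run copy_insn_1 over the REG_NOTES so any
   SCRATCHes in the notes map to the same copies used in the pattern.
   The helper name is hypothetical.  */

static rtx
copy_pattern_and_notes_sketch (rtx insn, rtx *notes_out)
{
  rtx pat = copy_insn (PATTERN (insn));

  *notes_out = copy_insn_1 (REG_NOTES (insn));
  return pat;
}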
5076
 
5077
/* Initialize data structures and variables in this file
5078
   before generating rtl for each function.  */
5079
 
5080
void
5081
init_emit (void)
5082
{
5083
  struct function *f = cfun;
5084
 
5085
  f->emit = ggc_alloc (sizeof (struct emit_status));
5086
  first_insn = NULL;
5087
  last_insn = NULL;
5088
  cur_insn_uid = 1;
5089
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5090
  last_location = UNKNOWN_LOCATION;
5091
  first_label_num = label_num;
5092
  seq_stack = NULL;
5093
 
5094
  /* Init the tables that describe all the pseudo regs.  */
5095
 
5096
  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5097
 
5098
  f->emit->regno_pointer_align
5099
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
5100
                         * sizeof (unsigned char));
5101
 
5102
  regno_reg_rtx
5103
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
5104
 
5105
  /* Put copies of all the hard registers into regno_reg_rtx.  */
5106
  memcpy (regno_reg_rtx,
5107
          static_regno_reg_rtx,
5108
          FIRST_PSEUDO_REGISTER * sizeof (rtx));
5109
 
5110
  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5111
  init_virtual_regs (f->emit);
5112
 
5113
  /* Indicate that the virtual registers and stack locations are
5114
     all pointers.  */
5115
  REG_POINTER (stack_pointer_rtx) = 1;
5116
  REG_POINTER (frame_pointer_rtx) = 1;
5117
  REG_POINTER (hard_frame_pointer_rtx) = 1;
5118
  REG_POINTER (arg_pointer_rtx) = 1;
5119
 
5120
  REG_POINTER (virtual_incoming_args_rtx) = 1;
5121
  REG_POINTER (virtual_stack_vars_rtx) = 1;
5122
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5123
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
5124
  REG_POINTER (virtual_cfa_rtx) = 1;
5125
 
5126
#ifdef STACK_BOUNDARY
5127
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5128
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5129
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5130
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5131
 
5132
  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5133
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5134
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5135
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5136
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5137
#endif
5138
 
5139
#ifdef INIT_EXPANDERS
5140
  INIT_EXPANDERS;
5141
#endif
5142
}
5143
 
5144
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5145
 
5146
static rtx
5147
gen_const_vector (enum machine_mode mode, int constant)
5148
{
5149
  rtx tem;
5150
  rtvec v;
5151
  int units, i;
5152
  enum machine_mode inner;
5153
 
5154
  units = GET_MODE_NUNITS (mode);
5155
  inner = GET_MODE_INNER (mode);
5156
 
5157
  v = rtvec_alloc (units);
5158
 
5159
  /* We need to call this function after we set the scalar const_tiny_rtx
5160
     entries.  */
5161
  gcc_assert (const_tiny_rtx[constant][(int) inner]);
5162
 
5163
  for (i = 0; i < units; ++i)
5164
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5165
 
5166
  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5167
  return tem;
5168
}
5169
 
5170
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5171
   all elements are zero, and the one vector when all elements are one.  */
5172
rtx
5173
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5174
{
5175
  enum machine_mode inner = GET_MODE_INNER (mode);
5176
  int nunits = GET_MODE_NUNITS (mode);
5177
  rtx x;
5178
  int i;
5179
 
5180
  /* Check to see if all of the elements have the same value.  */
5181
  x = RTVEC_ELT (v, nunits - 1);
5182
  for (i = nunits - 2; i >= 0; i--)
5183
    if (RTVEC_ELT (v, i) != x)
5184
      break;
5185
 
5186
  /* If the values are all the same, check to see if we can use one of the
5187
     standard constant vectors.  */
5188
  if (i == -1)
5189
    {
5190
      if (x == CONST0_RTX (inner))
5191
        return CONST0_RTX (mode);
5192
      else if (x == CONST1_RTX (inner))
5193
        return CONST1_RTX (mode);
5194
    }
5195
 
5196
  return gen_rtx_raw_CONST_VECTOR (mode, v);
5197
}
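
/* Hedged illustration: an all-zero vector collapses to the shared
   CONST0_RTX object rather than a fresh CONST_VECTOR, so pointer
   equality works for the common constants.  V4SImode is assumed to
   exist on the target; the function name is hypothetical.  */

static rtx
zero_v4si_sketch (void)
{
  rtvec v = rtvec_alloc (4);
  int i;

  for (i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;

  /* Returns CONST0_RTX (V4SImode), not a newly allocated vector.  */
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}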

/* Create some permanent unique rtl objects shared between all functions.
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */

void
init_emit_once (int line_numbers)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_once ();

  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
     tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  no_line_numbers = ! line_numbers;

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }
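
  /* As an illustration, on a typical 32-bit target the loop above picks
     QImode for byte_mode and SImode for word_mode; the exact modes are
     determined by BITS_PER_UNIT and BITS_PER_WORD.  */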

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
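
  /* Note that ptr_mode can differ from Pmode: ptr_mode matches the width
     of the C-level pointer type (POINTER_SIZE), while Pmode is the mode
     in which the target actually performs address arithmetic.  */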

  /* Assign register numbers to the globally defined register rtx.
     This must be done at runtime because the register number field
     is in a union and some compilers can't initialize unions.  */

  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  if (hard_frame_pointer_rtx == 0)
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
                                          HARD_FRAME_POINTER_REGNUM);
  if (arg_pointer_rtx == 0)
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end, which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
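
  /* Every CONST_INT in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] is now
     pre-built exactly once, so GEN_INT hands back these shared objects for
     small values and they can be compared by pointer; const0_rtx and
     const1_rtx are simply entries in this table.  */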

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst3,   3,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst10, 10,  0, double_mode);
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);

  /* Halve 1.0 by decrementing its binary exponent: the significand is
     unchanged, so the result is exactly 0.5.  */
  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need at least 160 bits of precision.  */
  real_from_string (&dconstpi,
    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
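
  /* At this point const_tiny_rtx[i][mode] holds the unique rtx for the
     value i in each scalar mode; the CONST0_RTX, CONST1_RTX and CONST2_RTX
     macros index straight into this table.  */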

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info, which marks epilogue insns
     that may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL, since mark_jump_label will
     recreate them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
        if (GET_CODE (link) == EXPR_LIST)
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
        else
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
      }

  /* Fix the libcall sequences.  The first insn of a libcall region carries
     a REG_LIBCALL note pointing at the last insn, and the last insn carries
     a REG_RETVAL note pointing back at the first.  The copied notes still
     point into the original sequence, so re-link the pair within the
     copy.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
        p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
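
/* Illustrative use (not taken from this file): a pass that duplicates
   code, such as basic block reordering, can copy insn I to follow TAIL
   with emit_copy_of_insn_after (I, TAIL), relying on the note fixups
   above to keep any libcall region on the copy well formed.  */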

static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
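
/* The cache above stores one (clobber (reg)) rtx per (mode, regno) pair,
   so repeated requests share a single object.  GTY((deletable)) permits
   the garbage collector to discard the whole table, since any entry can
   be regenerated on demand.  */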

#include "gt-emit-rtl.h"
