/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
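
/* For instance (an illustrative sketch, not part of this file's code): a
   machine-description pattern such as

     (set (match_operand:SI 0 "register_operand" "")
          (plus:SI (match_operand:SI 1 "register_operand" "")
                   (match_operand:SI 2 "register_operand" "")))

   would be emitted by the generated code roughly as

     gen_rtx_fmt_ee (SET, VOIDmode, operands[0],
                     gen_rtx_fmt_ee (PLUS, SImode,
                                     operands[1], operands[2]));

   where SET and PLUS both have format "ee" in rtl.def.  */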

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconst3;
REAL_VALUE_TYPE dconst10;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconstm2;
REAL_VALUE_TYPE dconsthalf;
REAL_VALUE_TYPE dconstthird;
REAL_VALUE_TYPE dconstpi;
REAL_VALUE_TYPE dconste;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx static_chain_rtx;           /* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;  /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;       /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_location (cfun->emit->x_last_location)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx find_line_note (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void unshare_all_decls (tree);
static void reset_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
                                 enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static tree component_ref_for_mem_expr (tree);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently proceeded by try_split.
190
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
          ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
          ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
          ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
          && p->size == q->size && p->align == q->align
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
               unsigned int align, enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
          || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
          ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */
314
 
315
static hashval_t
316
reg_attrs_htab_hash (const void *x)
317
{
318
  reg_attrs *p = (reg_attrs *) x;
319
 
320
  return ((p->offset * 1000) ^ (long) p->decl);
321
}
322
 
323
/* Returns nonzero if the value represented by X (which is really a
324
   reg_attrs *) is the same as that given by Y (which is also really a
325
   reg_attrs *).  */
326
 
327
static int
328
reg_attrs_htab_eq (const void *x, const void *y)
329
{
330
  reg_attrs *p = (reg_attrs *) x;
331
  reg_attrs *q = (reg_attrs *) y;
332
 
333
  return (p->decl == q->decl && p->offset == q->offset);
334
}
335
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
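
/* For example (an illustrative sketch): gen_int_mode truncates C to the
   width of MODE and sign-extends, so with the usual 8-bit QImode

     rtx x = gen_int_mode (255, QImode);

   yields (const_int -1) -- the very same shared rtx as GEN_INT (-1),
   since small CONST_INTs are pooled in const_int_rtx above.  */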

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
        the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
        of copies of the sign bit, and the signs of i0 and i1 are the same),
        then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
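
/* Worked example (illustrative, assuming a 32-bit HOST_WIDE_INT): for
   DImode, immed_double_const (5, 0, DImode) falls into case 2 and returns
   the shared (const_int 5), while immed_double_const (0, 1, DImode) --
   the value 2**32 -- needs both words and yields a VOIDmode CONST_DOUBLE
   with CONST_DOUBLE_LOW == 0 and CONST_DOUBLE_HIGH == 1.  */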

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!current_function_calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be the lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
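
/* A few illustrations of the rules above (assuming 4-byte words):
   (subreg:SI (reg:DI) 0) and (subreg:SI (reg:DI) 4) are valid word
   extractions; (subreg:DI (reg:DF) 0) is allowed because it does not
   change size; (subreg:QI (reg:DF) 3) is rejected by the floating-point
   size check.  Actual validity also depends on word size, endianness
   and the target's CANNOT_CHANGE_MODE_CLASS.  */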

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**          This routine creates an rtvec and stores within it the
**      pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec (int n, ...)
{
  int i, save_n;
  rtx *vector;
  va_list p;

  va_start (p, n);

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...     */

  vector = alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  va_end (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;          /* Don't allocate an empty rtvec...     */

  rt_val = rtvec_alloc (n);     /* Allocate an rtvec...                 */

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
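
/* Typical use (hypothetical names, for illustration): collecting several
   rtxs into the body of a PARALLEL, e.g.

     rtx body = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));

   where set0 and set1 are previously built SET rtxs.  */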

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  gcc_assert (!no_new_pseudos);

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
                          old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
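
/* For example: with generating_concat_p set, gen_reg_rtx (SCmode) returns
   (concat:SC (reg:SF i) (reg:SF i+1)) built from two fresh SFmode pseudos,
   whereas gen_reg_rtx (SImode) simply returns a new (reg:SI i).  (Register
   numbers here are illustrative.)  */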

/* Generate a register with the same attributes as REG, but offset by
   OFFSET.  Do the big-endian correction if needed.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno, int offset)
{
  rtx new = gen_rtx_REG (mode, regno);
  tree decl;
  HOST_WIDE_INT var_size;

  /* PR middle-end/14084
     The problem appears when a variable is stored in a larger register
     and later it is used in the original mode or some mode in between
     or some part of variable is accessed.

     On little endian machines there is no problem because
     the REG_OFFSET of the start of the variable is the same when
     accessed in any mode (it is 0).

     However, this is not true on big endian machines.
     The offset of the start of the variable is different when accessed
     in different modes.
     When we are taking a part of the REG we have to change the OFFSET
     from offset WRT size of mode of REG to offset WRT size of variable.

     If we would not do the big endian correction the resulting REG_OFFSET
     would be larger than the size of the DECL.

     Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:

     REG.mode  MODE  DECL size  old offset  new offset  description
     DI        SI    4          4           0           int32 in SImode
     DI        SI    1          4           0           char in SImode
     DI        QI    1          7           0           char in QImode
     DI        QI    4          5           1           1st element in QImode
                                                        of char[4]
     DI        HI    4          6           2           1st element in HImode
                                                        of int16[2]

     If the size of DECL is equal or greater than the size of REG
     we can't do this correction because the register holds the
     whole variable or a part of the variable and thus the REG_OFFSET
     is already correct.  */

  decl = REG_EXPR (reg);
  if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
      && decl != NULL
      && offset > 0
      && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode)
      && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
          && var_size < GET_MODE_SIZE (GET_MODE (reg))))
    {
      int offset_le;

      /* Convert machine endian to little endian WRT size of mode of REG.  */
      if (WORDS_BIG_ENDIAN)
        offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                     / UNITS_PER_WORD) * UNITS_PER_WORD;
      else
        offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;

      if (BYTES_BIG_ENDIAN)
        offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
                      % UNITS_PER_WORD);
      else
        offset_le += offset % UNITS_PER_WORD;

      if (offset_le >= var_size)
        {
          /* MODE is wider than the variable so the new reg will cover
             the whole variable so the resulting OFFSET should be 0.  */
          offset = 0;
        }
      else
        {
          /* Convert little endian to machine endian WRT size of variable.  */
          if (WORDS_BIG_ENDIAN)
            offset = ((var_size - 1 - offset_le)
                      / UNITS_PER_WORD) * UNITS_PER_WORD;
          else
            offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;

          if (BYTES_BIG_ENDIAN)
            offset += ((var_size - 1 - offset_le)
                       % UNITS_PER_WORD);
          else
            offset += offset_le % UNITS_PER_WORD;
        }
    }

  REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
                                   REG_OFFSET (reg) + offset);
  return new;
}

/* Set the register attributes of REG from the attributes of MEM.  */

void
set_reg_attrs_from_mem (rtx reg, rtx mem)
{
  if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
    REG_ATTRS (reg)
      = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_mem (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */
void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to parameter declaration T.  */
void
set_decl_incoming_rtl (tree t, rtx x)
{
  DECL_INCOMING_RTL (t) = x;

  if (!x)
    return;
  /* For registers, we maintain the reverse information too.  */
  if (REG_P (x))
    REG_ATTRS (x) = get_reg_attrs (t, 0);
  else if (GET_CODE (x) == SUBREG)
    REG_ATTRS (SUBREG_REG (x))
      = get_reg_attrs (t, -SUBREG_BYTE (x));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (GET_CODE (x) == CONST_INT
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
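
/* Two illustrative cases: gen_lowpart_common (QImode, GEN_INT (0x1234))
   returns (const_int 0x34), the low eight bits, via simplify_gen_subreg;
   and for X = (zero_extend:DI (reg:SI r)), asking for the SImode lowpart
   just returns (reg:SI r).  */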

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
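
/* Worked example: for INNERMODE DImode (8 bytes) and OUTERMODE SImode
   (4 bytes) with 4-byte words, difference = 4, so the lowpart offset is
   0 on a little-endian target and 4 (one word) on a big-endian one.
   subreg_highpart_offset below gives the complementary offset.  */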

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   In most cases this function can now be superseded by simplify_subreg.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new;

      else if (reload_completed)
        {
          if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
            return 0;
        }
      else
        return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
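
/* For example, with 4-byte words, operand_subword (op, 1, 0, DImode)
   forms the second word of a DImode operand: for a MEM it becomes a
   word_mode MEM at the original address plus 4; for a REG or constant
   the request is resolved by simplify_gen_subreg with byte offset 4.  */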

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
   or (2) a component ref of something variable.  Represent the latter with
   a NULL expression.  */

static tree
component_ref_for_mem_expr (tree ref)
{
  tree inner = TREE_OPERAND (ref, 0);

  if (TREE_CODE (inner) == COMPONENT_REF)
    inner = component_ref_for_mem_expr (inner);
  else
    {
      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR)
        inner = TREE_OPERAND (inner, 0);

      if (! DECL_P (inner))
        inner = NULL_TREE;
    }

  if (inner == TREE_OPERAND (ref, 0))
    return ref;
  else
    return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
                   TREE_OPERAND (ref, 1), NULL_TREE);
}

/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (tree expr1, tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  if (TREE_CODE (expr1) == COMPONENT_REF)
    return
      mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                        TREE_OPERAND (expr2, 0))
      && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
                           TREE_OPERAND (expr2, 1));

  if (INDIRECT_REF_P (expr1))
    return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
                             TREE_OPERAND (expr2, 0));

  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
     have been resolved here.  */
  gcc_assert (DECL_P (expr1));

  /* Decls with different pointers can't be equal.  */
  return 0;
}

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
        if (integer_zerop (TREE_OPERAND (t, 1)))
          /* We don't know anything about the alignment.  */
          align = BITS_PER_UNIT;
        else
          align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
             || TREE_CODE (t) == NON_LVALUE_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
         examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR
             || TREE_CODE (base) == BIT_FIELD_REF)
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        {
          if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
            MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
          else
            MEM_NOTRAP_P (ref) = 1;
        }
      else
        MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        {
          tree base_type = TREE_TYPE (base);
          gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
                      || DECL_ARTIFICIAL (base));
          MEM_READONLY_P (ref) = 1;
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
1564
         such as the word offset in the structure that might be modified.
1565
         But skip it for now.  */
1566
      else if (TREE_CODE (t) == COMPONENT_REF
1567
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1568
        {
1569
          expr = component_ref_for_mem_expr (t);
1570
          offset = const0_rtx;
1571
          apply_bitpos = bitpos;
1572
          /* ??? Any reason the field size would be different than
1573
             the size we got from the type?  */
1574
        }
1575
 
1576
      /* If this is an array reference, look for an outer field reference.  */
1577
      else if (TREE_CODE (t) == ARRAY_REF)
1578
        {
1579
          tree off_tree = size_zero_node;
1580
          /* We can't modify t, because we use it at the end of the
1581
             function.  */
1582
          tree t2 = t;
1583
 
1584
          do
1585
            {
1586
              tree index = TREE_OPERAND (t2, 1);
1587
              tree low_bound = array_ref_low_bound (t2);
1588
              tree unit_size = array_ref_element_size (t2);
1589
 
1590
              /* We assume all arrays have sizes that are a multiple of a byte.
1591
                 First subtract the lower bound, if any, in the type of the
1592
                 index, then convert to sizetype and multiply by the size of
1593
                 the array element.  */
1594
              if (! integer_zerop (low_bound))
1595
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1596
                                     index, low_bound);
1597
 
1598
              off_tree = size_binop (PLUS_EXPR,
1599
                                     size_binop (MULT_EXPR,
1600
                                                 fold_convert (sizetype,
1601
                                                               index),
1602
                                                 unit_size),
1603
                                     off_tree);
1604
              t2 = TREE_OPERAND (t2, 0);
1605
            }
1606
          while (TREE_CODE (t2) == ARRAY_REF);
1607
 
1608
          if (DECL_P (t2))
1609
            {
1610
              expr = t2;
1611
              offset = NULL;
1612
              if (host_integerp (off_tree, 1))
1613
                {
1614
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1615
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1616
                  align = DECL_ALIGN (t2);
1617
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1618
                    align = aoff;
1619
                  offset = GEN_INT (ioff);
1620
                  apply_bitpos = bitpos;
1621
                }
1622
            }
1623
          else if (TREE_CODE (t2) == COMPONENT_REF)
1624
            {
1625
              expr = component_ref_for_mem_expr (t2);
1626
              if (host_integerp (off_tree, 1))
1627
                {
1628
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
1629
                  apply_bitpos = bitpos;
1630
                }
1631
              /* ??? Any reason the field size would be different than
1632
                 the size we got from the type?  */
1633
            }
1634
          else if (flag_argument_noalias > 1
1635
                   && (INDIRECT_REF_P (t2))
1636
                   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1637
            {
1638
              expr = t2;
1639
              offset = NULL;
1640
            }
1641
        }
1642
 
1643
      /* If this is a Fortran indirect argument reference, record the
1644
         parameter decl.  */
1645
      else if (flag_argument_noalias > 1
1646
               && (INDIRECT_REF_P (t))
1647
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1648
        {
1649
          expr = t;
1650
          offset = NULL;
1651
        }
1652
    }
1653
 
1654
  /* If we modified OFFSET based on T, then subtract the outstanding
1655
     bit position offset.  Similarly, increase the size of the accessed
1656
     object to contain the negative offset.  */
1657
  if (apply_bitpos)
1658
    {
1659
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1660
      if (size)
1661
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1662
    }
1663
 
1664
  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1665
    {
1666
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1667
         we're overlapping.  */
1668
      offset = NULL;
1669
      expr = NULL;
1670
    }
1671
 
1672
  /* Now set the attributes we computed above.  */
1673
  MEM_ATTRS (ref)
1674
    = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1675
 
1676
  /* If this is already known to be a scalar or aggregate, we are done.  */
1677
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1678
    return;
1679
 
1680
  /* If it is a reference into an aggregate, this is part of an aggregate.
1681
     Otherwise we don't know.  */
1682
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1683
           || TREE_CODE (t) == ARRAY_RANGE_REF
1684
           || TREE_CODE (t) == BIT_FIELD_REF)
1685
    MEM_IN_STRUCT_P (ref) = 1;
1686
}
1687
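
/* Illustration only (not part of emit-rtl.c): a standalone sketch of the
   byte-offset computation the ARRAY_REF loop above performs, using plain
   host integers in place of trees.  For each array dimension we subtract
   the lower bound, scale by the element size, and accumulate, exactly as
   off_tree is built with size_binop.  */

#include <stdio.h>

struct dim { long index, low_bound, unit_size; };

static long
array_ref_byte_offset (const struct dim *dims, int ndims)
{
  long off = 0;
  int i;

  for (i = 0; i < ndims; i++)
    off += (dims[i].index - dims[i].low_bound) * dims[i].unit_size;
  return off;
}

int
main (void)
{
  /* a[2][3], 4-byte elements, 40-byte rows, both lower bounds zero:
     offset = 2*40 + 3*4 = 92 bytes.  */
  struct dim dims[2] = { { 2, 0, 40 }, { 3, 0, 4 } };
  printf ("%ld\n", array_ref_byte_offset (dims, 2));
  return 0;
}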
 
1688
void
1689
set_mem_attributes (rtx ref, tree t, int objectp)
1690
{
1691
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1692
}
1693
 
1694
/* Set the decl for MEM to DECL.  */
1695
 
1696
void
1697
set_mem_attrs_from_reg (rtx mem, rtx reg)
1698
{
1699
  MEM_ATTRS (mem)
1700
    = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1701
                     GEN_INT (REG_OFFSET (reg)),
1702
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1703
}
1704
 
1705
/* Set the alias set of MEM to SET.  */
1706
 
1707
void
1708
set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1709
{
1710
#ifdef ENABLE_CHECKING
1711
  /* If the new and old alias sets don't conflict, something is wrong.  */
1712
  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1713
#endif
1714
 
1715
  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1716
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
1717
                                   GET_MODE (mem));
1718
}
1719
 
1720
/* Set the alignment of MEM to ALIGN bits.  */
1721
 
1722
void
1723
set_mem_align (rtx mem, unsigned int align)
1724
{
1725
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1726
                                   MEM_OFFSET (mem), MEM_SIZE (mem), align,
1727
                                   GET_MODE (mem));
1728
}
1729
 
1730
/* Set the expr for MEM to EXPR.  */
1731
 
1732
void
1733
set_mem_expr (rtx mem, tree expr)
1734
{
1735
  MEM_ATTRS (mem)
1736
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1737
                     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1738
}
1739
 
1740
/* Set the offset of MEM to OFFSET.  */
1741
 
1742
void
1743
set_mem_offset (rtx mem, rtx offset)
1744
{
1745
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1746
                                   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1747
                                   GET_MODE (mem));
1748
}
1749
 
1750
/* Set the size of MEM to SIZE.  */
1751
 
1752
void
1753
set_mem_size (rtx mem, rtx size)
1754
{
1755
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1756
                                   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1757
                                   GET_MODE (mem));
1758
}
1759
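
/* Illustration only: a hypothetical helper (the name is invented) showing
   how the accessors above might be combined when hand-building a MEM in a
   backend.  gen_rtx_MEM, GEN_INT, GET_MODE_SIZE and the set_mem_* routines
   are the real interfaces defined or used elsewhere in this file.  */

static rtx
make_aligned_si_mem (rtx addr)
{
  rtx mem = gen_rtx_MEM (SImode, addr);

  set_mem_align (mem, 32);      /* Alignment is specified in bits.  */
  set_mem_size (mem, GEN_INT (GET_MODE_SIZE (SImode)));
  return mem;
}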
 
1760
/* Return a memory reference like MEMREF, but with its mode changed to MODE
1761
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
1762
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1763
   returned memory location is required to be valid.  The memory
1764
   attributes are not changed.  */
1765
 
1766
static rtx
1767
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1768
{
1769
  rtx new;
1770
 
1771
  gcc_assert (MEM_P (memref));
1772
  if (mode == VOIDmode)
1773
    mode = GET_MODE (memref);
1774
  if (addr == 0)
1775
    addr = XEXP (memref, 0);
1776
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1777
      && (!validate || memory_address_p (mode, addr)))
1778
    return memref;
1779
 
1780
  if (validate)
1781
    {
1782
      if (reload_in_progress || reload_completed)
1783
        gcc_assert (memory_address_p (mode, addr));
1784
      else
1785
        addr = memory_address (mode, addr);
1786
    }
1787
 
1788
  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1789
    return memref;
1790
 
1791
  new = gen_rtx_MEM (mode, addr);
1792
  MEM_COPY_ATTRIBUTES (new, memref);
1793
  return new;
1794
}
1795
 
1796
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1797
   way we are changing MEMREF, so we only preserve the alias set.  */
1798
 
1799
rtx
1800
change_address (rtx memref, enum machine_mode mode, rtx addr)
1801
{
1802
  rtx new = change_address_1 (memref, mode, addr, 1), size;
1803
  enum machine_mode mmode = GET_MODE (new);
1804
  unsigned int align;
1805
 
1806
  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1807
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1808
 
1809
  /* If there are no changes, just return the original memory reference.  */
1810
  if (new == memref)
1811
    {
1812
      if (MEM_ATTRS (memref) == 0
1813
          || (MEM_EXPR (memref) == NULL
1814
              && MEM_OFFSET (memref) == NULL
1815
              && MEM_SIZE (memref) == size
1816
              && MEM_ALIGN (memref) == align))
1817
        return new;
1818
 
1819
      new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1820
      MEM_COPY_ATTRIBUTES (new, memref);
1821
    }
1822
 
1823
  MEM_ATTRS (new)
1824
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1825
 
1826
  return new;
1827
}
1828
 
1829
/* Return a memory reference like MEMREF, but with its mode changed
1830
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
1831
   nonzero, the memory address is forced to be valid.
1832
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1833
   and the caller is responsible for adjusting the MEMREF base register.  */
1834
 
1835
rtx
1836
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1837
                  int validate, int adjust)
1838
{
1839
  rtx addr = XEXP (memref, 0);
1840
  rtx new;
1841
  rtx memoffset = MEM_OFFSET (memref);
1842
  rtx size = 0;
1843
  unsigned int memalign = MEM_ALIGN (memref);
1844
 
1845
  /* If there are no changes, just return the original memory reference.  */
1846
  if (mode == GET_MODE (memref) && !offset
1847
      && (!validate || memory_address_p (mode, addr)))
1848
    return memref;
1849
 
1850
  /* ??? Prefer to create garbage instead of creating shared rtl.
1851
     This may happen even if offset is nonzero -- consider
1852
     (plus (plus reg reg) const_int) -- so do this always.  */
1853
  addr = copy_rtx (addr);
1854
 
1855
  if (adjust)
1856
    {
1857
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1858
         object, we can merge it into the LO_SUM.  */
1859
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1860
          && offset >= 0
1861
          && (unsigned HOST_WIDE_INT) offset
1862
              < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1863
        addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1864
                               plus_constant (XEXP (addr, 1), offset));
1865
      else
1866
        addr = plus_constant (addr, offset);
1867
    }
1868
 
1869
  new = change_address_1 (memref, mode, addr, validate);
1870
 
1871
  /* Compute the new values of the memory attributes due to this adjustment.
1872
     We add the offsets and update the alignment.  */
1873
  if (memoffset)
1874
    memoffset = GEN_INT (offset + INTVAL (memoffset));
1875
 
1876
  /* Compute the new alignment by taking the MIN of the alignment and the
1877
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1878
     if zero.  */
1879
  if (offset != 0)
1880
    memalign
1881
      = MIN (memalign,
1882
             (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1883
 
1884
  /* We can compute the size in a number of ways.  */
1885
  if (GET_MODE (new) != BLKmode)
1886
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1887
  else if (MEM_SIZE (memref))
1888
    size = plus_constant (MEM_SIZE (memref), -offset);
1889
 
1890
  MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1891
                                   memoffset, size, memalign, GET_MODE (new));
1892
 
1893
  /* At some point, we should validate that this offset is within the object,
1894
     if all the appropriate values are known.  */
1895
  return new;
1896
}
1897
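
/* Illustration only: a standalone demonstration of the alignment update
   above.  (offset & -offset) isolates the lowest set bit of OFFSET, i.e.
   the largest power of two dividing it; the alignment known for the
   adjusted address can never exceed that many bytes.  */

#include <stdio.h>

int
main (void)
{
  long offset = 12;                 /* binary 1100 */
  long lowbit = offset & -offset;   /* 4, the largest power of 2 dividing 12 */
  unsigned int align = 64;          /* alignment of the base address, in bits */

  if (offset != 0 && (unsigned long) lowbit * 8 < align)
    align = lowbit * 8;             /* 8 stands in for BITS_PER_UNIT */
  printf ("%u\n", align);           /* prints 32 */
  return 0;
}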
 
1898
/* Return a memory reference like MEMREF, but with its mode changed
1899
   to MODE and its address changed to ADDR, which is assumed to be
1900
   MEMREF offset by OFFSET bytes.  If VALIDATE is
1901
   nonzero, the memory address is forced to be valid.  */
1902
 
1903
rtx
1904
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1905
                             HOST_WIDE_INT offset, int validate)
1906
{
1907
  memref = change_address_1 (memref, VOIDmode, addr, validate);
1908
  return adjust_address_1 (memref, mode, offset, validate, 0);
1909
}
1910
 
1911
/* Return a memory reference like MEMREF, but whose address is changed by
1912
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
1913
   known to be in OFFSET (possibly 1).  */
1914
 
1915
rtx
1916
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1917
{
1918
  rtx new, addr = XEXP (memref, 0);
1919
 
1920
  new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1921
 
1922
  /* At this point we don't know _why_ the address is invalid.  It
1923
     could have secondary memory references, multiplies or anything.
1924
 
1925
     However, if we did go and rearrange things, we can wind up not
1926
     being able to recognize the magic around pic_offset_table_rtx.
1927
     This stuff is fragile, and is yet another example of why it is
1928
     bad to expose PIC machinery too early.  */
1929
  if (! memory_address_p (GET_MODE (memref), new)
1930
      && GET_CODE (addr) == PLUS
1931
      && XEXP (addr, 0) == pic_offset_table_rtx)
1932
    {
1933
      addr = force_reg (GET_MODE (addr), addr);
1934
      new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1935
    }
1936
 
1937
  update_temp_slot_address (XEXP (memref, 0), new);
1938
  new = change_address_1 (memref, VOIDmode, new, 1);
1939
 
1940
  /* If there are no changes, just return the original memory reference.  */
1941
  if (new == memref)
1942
    return new;
1943
 
1944
  /* Update the alignment to reflect the offset.  Reset the offset, which
1945
     we don't know.  */
1946
  MEM_ATTRS (new)
1947
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1948
                     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1949
                     GET_MODE (new));
1950
  return new;
1951
}
1952
 
1953
/* Return a memory reference like MEMREF, but with its address changed to
1954
   ADDR.  The caller is asserting that the actual piece of memory pointed
1955
   to is the same, just the form of the address is being changed, such as
1956
   by putting something into a register.  */
1957
 
1958
rtx
1959
replace_equiv_address (rtx memref, rtx addr)
1960
{
1961
  /* change_address_1 copies the memory attribute structure without change
1962
     and that's exactly what we want here.  */
1963
  update_temp_slot_address (XEXP (memref, 0), addr);
1964
  return change_address_1 (memref, VOIDmode, addr, 1);
1965
}
1966
 
1967
/* Likewise, but the reference is not required to be valid.  */
1968
 
1969
rtx
1970
replace_equiv_address_nv (rtx memref, rtx addr)
1971
{
1972
  return change_address_1 (memref, VOIDmode, addr, 0);
1973
}
1974
 
1975
/* Return a memory reference like MEMREF, but with its mode widened to
1976
   MODE and offset by OFFSET.  This would be used by targets that e.g.
1977
   cannot issue QImode memory operations and have to use SImode memory
1978
   operations plus masking logic.  */
1979
 
1980
rtx
1981
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
1982
{
1983
  rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
1984
  tree expr = MEM_EXPR (new);
1985
  rtx memoffset = MEM_OFFSET (new);
1986
  unsigned int size = GET_MODE_SIZE (mode);
1987
 
1988
  /* If there are no changes, just return the original memory reference.  */
1989
  if (new == memref)
1990
    return new;
1991
 
1992
  /* If we don't know what offset we were at within the expression, then
1993
     we can't know if we've overstepped the bounds.  */
1994
  if (! memoffset)
1995
    expr = NULL_TREE;
1996
 
1997
  while (expr)
1998
    {
1999
      if (TREE_CODE (expr) == COMPONENT_REF)
2000
        {
2001
          tree field = TREE_OPERAND (expr, 1);
2002
          tree offset = component_ref_field_offset (expr);
2003
 
2004
          if (! DECL_SIZE_UNIT (field))
2005
            {
2006
              expr = NULL_TREE;
2007
              break;
2008
            }
2009
 
2010
          /* Is the field at least as large as the access?  If so, ok,
2011
             otherwise strip back to the containing structure.  */
2012
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2013
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2014
              && INTVAL (memoffset) >= 0)
2015
            break;
2016
 
2017
          if (! host_integerp (offset, 1))
2018
            {
2019
              expr = NULL_TREE;
2020
              break;
2021
            }
2022
 
2023
          expr = TREE_OPERAND (expr, 0);
2024
          memoffset
2025
            = (GEN_INT (INTVAL (memoffset)
2026
                        + tree_low_cst (offset, 1)
2027
                        + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2028
                           / BITS_PER_UNIT)));
2029
        }
2030
      /* Similarly for the decl.  */
2031
      else if (DECL_P (expr)
2032
               && DECL_SIZE_UNIT (expr)
2033
               && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2034
               && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2035
               && (! memoffset || INTVAL (memoffset) >= 0))
2036
        break;
2037
      else
2038
        {
2039
          /* The widened memory access overflows the expression, which means
2040
             that it could alias another expression.  Zap it.  */
2041
          expr = NULL_TREE;
2042
          break;
2043
        }
2044
    }
2045
 
2046
  if (! expr)
2047
    memoffset = NULL_RTX;
2048
 
2049
  /* The widened memory may alias other stuff, so zap the alias set.  */
2050
  /* ??? Maybe use get_alias_set on any remaining expression.  */
2051
 
2052
  MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2053
                                   MEM_ALIGN (new), mode);
2054
 
2055
  return new;
2056
}
2057
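
/* Illustration only: the masking a target without byte-sized loads must
   emit once an access has been widened as above -- here, fetching byte N
   from memory that can only be read a 32-bit word at a time, assuming a
   little-endian layout.  */

#include <stdint.h>
#include <stdio.h>

static uint8_t
load_byte_via_word (const uint32_t *words, unsigned int byte_index)
{
  uint32_t word = words[byte_index / 4];       /* one aligned SImode load */
  unsigned int shift = (byte_index % 4) * 8;   /* position within the word */

  return (uint8_t) (word >> shift);            /* shift and mask to QImode */
}

int
main (void)
{
  uint32_t buf[1] = { 0xddccbbaa };
  printf ("0x%x\n", load_byte_via_word (buf, 1));   /* prints 0xbb */
  return 0;
}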
 
2058
/* Return a newly created CODE_LABEL rtx with a unique label number.  */
2059
 
2060
rtx
2061
gen_label_rtx (void)
2062
{
2063
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2064
                             NULL, label_num++, NULL);
2065
}
2066
 
2067
/* For procedure integration.  */
2068
 
2069
/* Install new pointers to the first and last insns in the chain.
2070
   Also, set cur_insn_uid to one higher than the last in use.
2071
   Used for an inline-procedure after copying the insn chain.  */
2072
 
2073
void
2074
set_new_first_and_last_insn (rtx first, rtx last)
2075
{
2076
  rtx insn;
2077
 
2078
  first_insn = first;
2079
  last_insn = last;
2080
  cur_insn_uid = 0;
2081
 
2082
  for (insn = first; insn; insn = NEXT_INSN (insn))
2083
    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2084
 
2085
  cur_insn_uid++;
2086
}
2087
 
2088
/* Go through all the RTL insn bodies and copy any invalid shared
2089
   structure.  This routine should only be called once.  */
2090
 
2091
static void
2092
unshare_all_rtl_1 (tree fndecl, rtx insn)
2093
{
2094
  tree decl;
2095
 
2096
  /* Make sure that virtual parameters are not shared.  */
2097
  for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2098
    SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2099
 
2100
  /* Make sure that virtual stack slots are not shared.  */
2101
  unshare_all_decls (DECL_INITIAL (fndecl));
2102
 
2103
  /* Unshare just about everything else.  */
2104
  unshare_all_rtl_in_chain (insn);
2105
 
2106
  /* Make sure the addresses of stack slots found outside the insn chain
2107
     (such as, in DECL_RTL of a variable) are not shared
2108
     with the insn chain.
2109
 
2110
     This special care is necessary when the stack slot MEM does not
2111
     actually appear in the insn chain.  If it does appear, its address
2112
     is unshared from all else at that point.  */
2113
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2114
}
2115
 
2116
/* Go through all the RTL insn bodies and copy any invalid shared
2117
   structure, again.  This is a fairly expensive thing to do so it
2118
   should be done sparingly.  */
2119
 
2120
void
2121
unshare_all_rtl_again (rtx insn)
2122
{
2123
  rtx p;
2124
  tree decl;
2125
 
2126
  for (p = insn; p; p = NEXT_INSN (p))
2127
    if (INSN_P (p))
2128
      {
2129
        reset_used_flags (PATTERN (p));
2130
        reset_used_flags (REG_NOTES (p));
2131
        reset_used_flags (LOG_LINKS (p));
2132
      }
2133
 
2134
  /* Make sure that virtual stack slots are not shared.  */
2135
  reset_used_decls (DECL_INITIAL (cfun->decl));
2136
 
2137
  /* Make sure that virtual parameters are not shared.  */
2138
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2139
    reset_used_flags (DECL_RTL (decl));
2140
 
2141
  reset_used_flags (stack_slot_list);
2142
 
2143
  unshare_all_rtl_1 (cfun->decl, insn);
2144
}
2145
 
2146
unsigned int
2147
unshare_all_rtl (void)
2148
{
2149
  unshare_all_rtl_1 (current_function_decl, get_insns ());
2150
  return 0;
2151
}
2152
 
2153
struct tree_opt_pass pass_unshare_all_rtl =
2154
{
2155
  "unshare",                            /* name */
2156
  NULL,                                 /* gate */
2157
  unshare_all_rtl,                      /* execute */
2158
  NULL,                                 /* sub */
2159
  NULL,                                 /* next */
2160
  0,                                    /* static_pass_number */
2161
  0,                                    /* tv_id */
2162
  0,                                    /* properties_required */
2163
  0,                                    /* properties_provided */
2164
  0,                                    /* properties_destroyed */
2165
  0,                                    /* todo_flags_start */
2166
  TODO_dump_func,                       /* todo_flags_finish */
2167
 
2168
};
2169
 
2170
 
2171
/* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2172
   Recursively does the same for subexpressions.  */
2173
 
2174
static void
2175
verify_rtx_sharing (rtx orig, rtx insn)
2176
{
2177
  rtx x = orig;
2178
  int i;
2179
  enum rtx_code code;
2180
  const char *format_ptr;
2181
 
2182
  if (x == 0)
2183
    return;
2184
 
2185
  code = GET_CODE (x);
2186
 
2187
  /* These types may be freely shared.  */
2188
 
2189
  switch (code)
2190
    {
2191
    case REG:
2192
    case CONST_INT:
2193
    case CONST_DOUBLE:
2194
    case CONST_VECTOR:
2195
    case SYMBOL_REF:
2196
    case LABEL_REF:
2197
    case CODE_LABEL:
2198
    case PC:
2199
    case CC0:
2200
    case SCRATCH:
2201
      return;
2202
      /* A SCRATCH must be shared because it represents a distinct value.  */
2203
    case CLOBBER:
2204
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2205
        return;
2206
      break;
2207
 
2208
    case CONST:
2209
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2210
         a LABEL_REF, it isn't sharable.  */
2211
      if (GET_CODE (XEXP (x, 0)) == PLUS
2212
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2213
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2214
        return;
2215
      break;
2216
 
2217
    case MEM:
2218
      /* A MEM is allowed to be shared if its address is constant, or
         during and after reload.  */
2219
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2220
          || reload_completed || reload_in_progress)
2221
        return;
2222
 
2223
      break;
2224
 
2225
    default:
2226
      break;
2227
    }
2228
 
2229
  /* This rtx may not be shared.  If it has already been seen,
2230
     replace it with a copy of itself.  */
2231
#ifdef ENABLE_CHECKING
2232
  if (RTX_FLAG (x, used))
2233
    {
2234
      error ("invalid rtl sharing found in the insn");
2235
      debug_rtx (insn);
2236
      error ("shared rtx");
2237
      debug_rtx (x);
2238
      internal_error ("internal consistency failure");
2239
    }
2240
#endif
2241
  gcc_assert (!RTX_FLAG (x, used));
2242
 
2243
  RTX_FLAG (x, used) = 1;
2244
 
2245
  /* Now scan the subexpressions recursively.  */
2246
 
2247
  format_ptr = GET_RTX_FORMAT (code);
2248
 
2249
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2250
    {
2251
      switch (*format_ptr++)
2252
        {
2253
        case 'e':
2254
          verify_rtx_sharing (XEXP (x, i), insn);
2255
          break;
2256
 
2257
        case 'E':
2258
          if (XVEC (x, i) != NULL)
2259
            {
2260
              int j;
2261
              int len = XVECLEN (x, i);
2262
 
2263
              for (j = 0; j < len; j++)
2264
                {
2265
                  /* We allow sharing of ASM_OPERANDS inside single
2266
                     instruction.  */
2267
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2268
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2269
                          == ASM_OPERANDS))
2270
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2271
                  else
2272
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2273
                }
2274
            }
2275
          break;
2276
        }
2277
    }
2278
  return;
2279
}
2280
 
2281
/* Go through all the RTL insn bodies and check that there is no unexpected
2282
   sharing in between the subexpressions.  */
2283
 
2284
void
2285
verify_rtl_sharing (void)
2286
{
2287
  rtx p;
2288
 
2289
  for (p = get_insns (); p; p = NEXT_INSN (p))
2290
    if (INSN_P (p))
2291
      {
2292
        reset_used_flags (PATTERN (p));
2293
        reset_used_flags (REG_NOTES (p));
2294
        reset_used_flags (LOG_LINKS (p));
2295
      }
2296
 
2297
  for (p = get_insns (); p; p = NEXT_INSN (p))
2298
    if (INSN_P (p))
2299
      {
2300
        verify_rtx_sharing (PATTERN (p), p);
2301
        verify_rtx_sharing (REG_NOTES (p), p);
2302
        verify_rtx_sharing (LOG_LINKS (p), p);
2303
      }
2304
}
2305
 
2306
/* Go through all the RTL insn bodies and copy any invalid shared structure.
2307
   Assumes the mark bits are cleared at entry.  */
2308
 
2309
void
2310
unshare_all_rtl_in_chain (rtx insn)
2311
{
2312
  for (; insn; insn = NEXT_INSN (insn))
2313
    if (INSN_P (insn))
2314
      {
2315
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2316
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2317
        LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2318
      }
2319
}
2320
 
2321
/* Go through all virtual stack slots of a function and copy any
2322
   shared structure.  */
2323
static void
2324
unshare_all_decls (tree blk)
2325
{
2326
  tree t;
2327
 
2328
  /* Copy shared decls.  */
2329
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2330
    if (DECL_RTL_SET_P (t))
2331
      SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2332
 
2333
  /* Now process sub-blocks.  */
2334
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2335
    unshare_all_decls (t);
2336
}
2337
 
2338
/* Go through all virtual stack slots of a function and mark them as
2339
   not shared.  */
2340
static void
2341
reset_used_decls (tree blk)
2342
{
2343
  tree t;
2344
 
2345
  /* Mark decls.  */
2346
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2347
    if (DECL_RTL_SET_P (t))
2348
      reset_used_flags (DECL_RTL (t));
2349
 
2350
  /* Now process sub-blocks.  */
2351
  for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2352
    reset_used_decls (t);
2353
}
2354
 
2355
/* Mark ORIG as in use, and return a copy of it if it was already in use.
2356
   Recursively does the same for subexpressions.  Uses
2357
   copy_rtx_if_shared_1 to reduce stack space.  */
2358
 
2359
rtx
2360
copy_rtx_if_shared (rtx orig)
2361
{
2362
  copy_rtx_if_shared_1 (&orig);
2363
  return orig;
2364
}
2365
 
2366
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2367
   use.  Recursively does the same for subexpressions.  */
2368
 
2369
static void
2370
copy_rtx_if_shared_1 (rtx *orig1)
2371
{
2372
  rtx x;
2373
  int i;
2374
  enum rtx_code code;
2375
  rtx *last_ptr;
2376
  const char *format_ptr;
2377
  int copied = 0;
2378
  int length;
2379
 
2380
  /* Repeat is used to turn tail-recursion into iteration.  */
2381
repeat:
2382
  x = *orig1;
2383
 
2384
  if (x == 0)
2385
    return;
2386
 
2387
  code = GET_CODE (x);
2388
 
2389
  /* These types may be freely shared.  */
2390
 
2391
  switch (code)
2392
    {
2393
    case REG:
2394
    case CONST_INT:
2395
    case CONST_DOUBLE:
2396
    case CONST_VECTOR:
2397
    case SYMBOL_REF:
2398
    case LABEL_REF:
2399
    case CODE_LABEL:
2400
    case PC:
2401
    case CC0:
2402
    case SCRATCH:
2403
      /* A SCRATCH must be shared because it represents a distinct value.  */
2404
      return;
2405
    case CLOBBER:
2406
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2407
        return;
2408
      break;
2409
 
2410
    case CONST:
2411
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2412
         a LABEL_REF, it isn't sharable.  */
2413
      if (GET_CODE (XEXP (x, 0)) == PLUS
2414
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2415
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2416
        return;
2417
      break;
2418
 
2419
    case INSN:
2420
    case JUMP_INSN:
2421
    case CALL_INSN:
2422
    case NOTE:
2423
    case BARRIER:
2424
      /* The chain of insns is not being copied.  */
2425
      return;
2426
 
2427
    default:
2428
      break;
2429
    }
2430
 
2431
  /* This rtx may not be shared.  If it has already been seen,
2432
     replace it with a copy of itself.  */
2433
 
2434
  if (RTX_FLAG (x, used))
2435
    {
2436
      x = shallow_copy_rtx (x);
2437
      copied = 1;
2438
    }
2439
  RTX_FLAG (x, used) = 1;
2440
 
2441
  /* Now scan the subexpressions recursively.
2442
     We can store any replaced subexpressions directly into X
2443
     since we know X is not shared!  Any vectors in X
2444
     must be copied if X was copied.  */
2445
 
2446
  format_ptr = GET_RTX_FORMAT (code);
2447
  length = GET_RTX_LENGTH (code);
2448
  last_ptr = NULL;
2449
 
2450
  for (i = 0; i < length; i++)
2451
    {
2452
      switch (*format_ptr++)
2453
        {
2454
        case 'e':
2455
          if (last_ptr)
2456
            copy_rtx_if_shared_1 (last_ptr);
2457
          last_ptr = &XEXP (x, i);
2458
          break;
2459
 
2460
        case 'E':
2461
          if (XVEC (x, i) != NULL)
2462
            {
2463
              int j;
2464
              int len = XVECLEN (x, i);
2465
 
2466
              /* Copy the vector iff we copied the rtx and the length
2467
                 is nonzero.  */
2468
              if (copied && len > 0)
2469
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2470
 
2471
              /* Call recursively on all inside the vector.  */
2472
              for (j = 0; j < len; j++)
2473
                {
2474
                  if (last_ptr)
2475
                    copy_rtx_if_shared_1 (last_ptr);
2476
                  last_ptr = &XVECEXP (x, i, j);
2477
                }
2478
            }
2479
          break;
2480
        }
2481
    }
2482
  *orig1 = x;
2483
  if (last_ptr)
2484
    {
2485
      orig1 = last_ptr;
2486
      goto repeat;
2487
    }
2488
  return;
2489
}
2490
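
/* Illustration only: the used-bit protocol above, restated for a toy
   expression node.  Flags start clear (reset_used_flags); the first visit
   marks a node, and reaching an already-marked node through a second
   parent replaces that edge with a fresh copy, so a DAG becomes a tree.  */

#include <stdio.h>
#include <stdlib.h>

struct node { int used; int value; struct node *kid[2]; };

static void
unshare (struct node **slot)
{
  struct node *n = *slot;
  int i;

  if (n == NULL)
    return;
  if (n->used)                      /* second encounter: copy it */
    {
      struct node *copy = malloc (sizeof *copy);
      *copy = *n;
      *slot = n = copy;
    }
  n->used = 1;
  for (i = 0; i < 2; i++)
    unshare (&n->kid[i]);
}

int
main (void)
{
  struct node leaf = { 0, 7, { NULL, NULL } };
  struct node root = { 0, 0, { &leaf, &leaf } };   /* two edges to one node */
  struct node *r = &root;

  unshare (&r);
  printf ("%s\n", r->kid[0] == r->kid[1] ? "shared" : "unshared");
  return 0;
}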
 
2491
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2492
   to look for shared sub-parts.  */
2493
 
2494
void
2495
reset_used_flags (rtx x)
2496
{
2497
  int i, j;
2498
  enum rtx_code code;
2499
  const char *format_ptr;
2500
  int length;
2501
 
2502
  /* Repeat is used to turn tail-recursion into iteration.  */
2503
repeat:
2504
  if (x == 0)
2505
    return;
2506
 
2507
  code = GET_CODE (x);
2508
 
2509
  /* These types may be freely shared so we needn't do any resetting
2510
     for them.  */
2511
 
2512
  switch (code)
2513
    {
2514
    case REG:
2515
    case CONST_INT:
2516
    case CONST_DOUBLE:
2517
    case CONST_VECTOR:
2518
    case SYMBOL_REF:
2519
    case CODE_LABEL:
2520
    case PC:
2521
    case CC0:
2522
      return;
2523
 
2524
    case INSN:
2525
    case JUMP_INSN:
2526
    case CALL_INSN:
2527
    case NOTE:
2528
    case LABEL_REF:
2529
    case BARRIER:
2530
      /* The chain of insns is not being copied.  */
2531
      return;
2532
 
2533
    default:
2534
      break;
2535
    }
2536
 
2537
  RTX_FLAG (x, used) = 0;
2538
 
2539
  format_ptr = GET_RTX_FORMAT (code);
2540
  length = GET_RTX_LENGTH (code);
2541
 
2542
  for (i = 0; i < length; i++)
2543
    {
2544
      switch (*format_ptr++)
2545
        {
2546
        case 'e':
2547
          if (i == length-1)
2548
            {
2549
              x = XEXP (x, i);
2550
              goto repeat;
2551
            }
2552
          reset_used_flags (XEXP (x, i));
2553
          break;
2554
 
2555
        case 'E':
2556
          for (j = 0; j < XVECLEN (x, i); j++)
2557
            reset_used_flags (XVECEXP (x, i, j));
2558
          break;
2559
        }
2560
    }
2561
}
2562
 
2563
/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2564
   to look for shared sub-parts.  */
2565
 
2566
void
2567
set_used_flags (rtx x)
2568
{
2569
  int i, j;
2570
  enum rtx_code code;
2571
  const char *format_ptr;
2572
 
2573
  if (x == 0)
2574
    return;
2575
 
2576
  code = GET_CODE (x);
2577
 
2578
  /* These types may be freely shared so we needn't do any resetting
2579
     for them.  */
2580
 
2581
  switch (code)
2582
    {
2583
    case REG:
2584
    case CONST_INT:
2585
    case CONST_DOUBLE:
2586
    case CONST_VECTOR:
2587
    case SYMBOL_REF:
2588
    case CODE_LABEL:
2589
    case PC:
2590
    case CC0:
2591
      return;
2592
 
2593
    case INSN:
2594
    case JUMP_INSN:
2595
    case CALL_INSN:
2596
    case NOTE:
2597
    case LABEL_REF:
2598
    case BARRIER:
2599
      /* The chain of insns is not being copied.  */
2600
      return;
2601
 
2602
    default:
2603
      break;
2604
    }
2605
 
2606
  RTX_FLAG (x, used) = 1;
2607
 
2608
  format_ptr = GET_RTX_FORMAT (code);
2609
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
2610
    {
2611
      switch (*format_ptr++)
2612
        {
2613
        case 'e':
2614
          set_used_flags (XEXP (x, i));
2615
          break;
2616
 
2617
        case 'E':
2618
          for (j = 0; j < XVECLEN (x, i); j++)
2619
            set_used_flags (XVECEXP (x, i, j));
2620
          break;
2621
        }
2622
    }
2623
}
2624
 
2625
/* Copy X if necessary so that it won't be altered by changes in OTHER.
2626
   Return X or the rtx for the pseudo reg the value of X was copied into.
2627
   OTHER must be valid as a SET_DEST.  */
2628
 
2629
rtx
2630
make_safe_from (rtx x, rtx other)
2631
{
2632
  while (1)
2633
    switch (GET_CODE (other))
2634
      {
2635
      case SUBREG:
2636
        other = SUBREG_REG (other);
2637
        break;
2638
      case STRICT_LOW_PART:
2639
      case SIGN_EXTEND:
2640
      case ZERO_EXTEND:
2641
        other = XEXP (other, 0);
2642
        break;
2643
      default:
2644
        goto done;
2645
      }
2646
 done:
2647
  if ((MEM_P (other)
2648
       && ! CONSTANT_P (x)
2649
       && !REG_P (x)
2650
       && GET_CODE (x) != SUBREG)
2651
      || (REG_P (other)
2652
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
2653
              || reg_mentioned_p (other, x))))
2654
    {
2655
      rtx temp = gen_reg_rtx (GET_MODE (x));
2656
      emit_move_insn (temp, x);
2657
      return temp;
2658
    }
2659
  return x;
2660
}
2661
 
2662
/* Emission of insns (adding them to the doubly-linked list).  */
2663
 
2664
/* Return the first insn of the current sequence or current function.  */
2665
 
2666
rtx
2667
get_insns (void)
2668
{
2669
  return first_insn;
2670
}
2671
 
2672
/* Specify a new insn as the first in the chain.  */
2673
 
2674
void
2675
set_first_insn (rtx insn)
2676
{
2677
  gcc_assert (!PREV_INSN (insn));
2678
  first_insn = insn;
2679
}
2680
 
2681
/* Return the last insn emitted in current sequence or current function.  */
2682
 
2683
rtx
2684
get_last_insn (void)
2685
{
2686
  return last_insn;
2687
}
2688
 
2689
/* Specify a new insn as the last in the chain.  */
2690
 
2691
void
2692
set_last_insn (rtx insn)
2693
{
2694
  gcc_assert (!NEXT_INSN (insn));
2695
  last_insn = insn;
2696
}
2697
 
2698
/* Return the last insn emitted, even if it is in a sequence now pushed.  */
2699
 
2700
rtx
2701
get_last_insn_anywhere (void)
2702
{
2703
  struct sequence_stack *stack;
2704
  if (last_insn)
2705
    return last_insn;
2706
  for (stack = seq_stack; stack; stack = stack->next)
2707
    if (stack->last != 0)
2708
      return stack->last;
2709
  return 0;
2710
}
2711
 
2712
/* Return the first nonnote insn emitted in current sequence or current
2713
   function.  This routine looks inside SEQUENCEs.  */
2714
 
2715
rtx
2716
get_first_nonnote_insn (void)
2717
{
2718
  rtx insn = first_insn;
2719
 
2720
  if (insn)
2721
    {
2722
      if (NOTE_P (insn))
2723
        for (insn = next_insn (insn);
2724
             insn && NOTE_P (insn);
2725
             insn = next_insn (insn))
2726
          continue;
2727
      else
2728
        {
2729
          if (NONJUMP_INSN_P (insn)
2730
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
2731
            insn = XVECEXP (PATTERN (insn), 0, 0);
2732
        }
2733
    }
2734
 
2735
  return insn;
2736
}
2737
 
2738
/* Return the last nonnote insn emitted in current sequence or current
2739
   function.  This routine looks inside SEQUENCEs.  */
2740
 
2741
rtx
2742
get_last_nonnote_insn (void)
2743
{
2744
  rtx insn = last_insn;
2745
 
2746
  if (insn)
2747
    {
2748
      if (NOTE_P (insn))
2749
        for (insn = previous_insn (insn);
2750
             insn && NOTE_P (insn);
2751
             insn = previous_insn (insn))
2752
          continue;
2753
      else
2754
        {
2755
          if (NONJUMP_INSN_P (insn)
2756
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
2757
            insn = XVECEXP (PATTERN (insn), 0,
2758
                            XVECLEN (PATTERN (insn), 0) - 1);
2759
        }
2760
    }
2761
 
2762
  return insn;
2763
}
2764
 
2765
/* Return a number larger than any instruction's uid in this function.  */
2766
 
2767
int
2768
get_max_uid (void)
2769
{
2770
  return cur_insn_uid;
2771
}
2772
 
2773
/* Renumber instructions so that no instruction UIDs are wasted.  */
2774
 
2775
void
2776
renumber_insns (void)
2777
{
2778
  rtx insn;
2779
 
2780
  /* If we're not supposed to renumber instructions, don't.  */
2781
  if (!flag_renumber_insns)
2782
    return;
2783
 
2784
  /* If there aren't that many instructions, then it's not really
2785
     worth renumbering them.  */
2786
  if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2787
    return;
2788
 
2789
  cur_insn_uid = 1;
2790
 
2791
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2792
    {
2793
      if (dump_file)
2794
        fprintf (dump_file, "Renumbering insn %d to %d\n",
2795
                 INSN_UID (insn), cur_insn_uid);
2796
      INSN_UID (insn) = cur_insn_uid++;
2797
    }
2798
}
2799
 
2800
/* Return the next insn.  If it is a SEQUENCE, return the first insn
2801
   of the sequence.  */
2802
 
2803
rtx
2804
next_insn (rtx insn)
2805
{
2806
  if (insn)
2807
    {
2808
      insn = NEXT_INSN (insn);
2809
      if (insn && NONJUMP_INSN_P (insn)
2810
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
2811
        insn = XVECEXP (PATTERN (insn), 0, 0);
2812
    }
2813
 
2814
  return insn;
2815
}
2816
 
2817
/* Return the previous insn.  If it is a SEQUENCE, return the last insn
2818
   of the sequence.  */
2819
 
2820
rtx
2821
previous_insn (rtx insn)
2822
{
2823
  if (insn)
2824
    {
2825
      insn = PREV_INSN (insn);
2826
      if (insn && NONJUMP_INSN_P (insn)
2827
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
2828
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2829
    }
2830
 
2831
  return insn;
2832
}
2833
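
/* Illustration only: the SEQUENCE-descending step used by next_insn and
   previous_insn above, restated for a toy list whose elements may carry a
   nested group of sub-elements.  */

#include <stdio.h>

struct elt { struct elt *next; struct elt *group; int id; };

static struct elt *
next_elt (struct elt *e)
{
  if (e)
    {
      e = e->next;
      if (e && e->group)      /* like a SEQUENCE: step into the group */
        e = e->group;
    }
  return e;
}

int
main (void)
{
  struct elt inner = { NULL, NULL, 3 };
  struct elt seq = { NULL, &inner, 2 };
  struct elt first = { &seq, NULL, 1 };

  printf ("%d\n", next_elt (&first)->id);   /* prints 3, not 2 */
  return 0;
}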
 
2834
/* Return the next insn after INSN that is not a NOTE.  This routine does not
2835
   look inside SEQUENCEs.  */
2836
 
2837
rtx
2838
next_nonnote_insn (rtx insn)
2839
{
2840
  while (insn)
2841
    {
2842
      insn = NEXT_INSN (insn);
2843
      if (insn == 0 || !NOTE_P (insn))
2844
        break;
2845
    }
2846
 
2847
  return insn;
2848
}
2849
 
2850
/* Return the previous insn before INSN that is not a NOTE.  This routine does
2851
   not look inside SEQUENCEs.  */
2852
 
2853
rtx
2854
prev_nonnote_insn (rtx insn)
2855
{
2856
  while (insn)
2857
    {
2858
      insn = PREV_INSN (insn);
2859
      if (insn == 0 || !NOTE_P (insn))
2860
        break;
2861
    }
2862
 
2863
  return insn;
2864
}
2865
 
2866
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2867
   or 0, if there is none.  This routine does not look inside
2868
   SEQUENCEs.  */
2869
 
2870
rtx
2871
next_real_insn (rtx insn)
2872
{
2873
  while (insn)
2874
    {
2875
      insn = NEXT_INSN (insn);
2876
      if (insn == 0 || INSN_P (insn))
2877
        break;
2878
    }
2879
 
2880
  return insn;
2881
}
2882
 
2883
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2884
   or 0, if there is none.  This routine does not look inside
2885
   SEQUENCEs.  */
2886
 
2887
rtx
2888
prev_real_insn (rtx insn)
2889
{
2890
  while (insn)
2891
    {
2892
      insn = PREV_INSN (insn);
2893
      if (insn == 0 || INSN_P (insn))
2894
        break;
2895
    }
2896
 
2897
  return insn;
2898
}
2899
 
2900
/* Return the last CALL_INSN in the current list, or 0 if there is none.
2901
   This routine does not look inside SEQUENCEs.  */
2902
 
2903
rtx
2904
last_call_insn (void)
2905
{
2906
  rtx insn;
2907
 
2908
  for (insn = get_last_insn ();
2909
       insn && !CALL_P (insn);
2910
       insn = PREV_INSN (insn))
2911
    ;
2912
 
2913
  return insn;
2914
}
2915
 
2916
/* Return nonzero if INSN really does something: it is a CALL_INSN, a
2917
   JUMP_INSN, or an INSN whose pattern, once reload has completed, is not
2918
   merely a USE or CLOBBER.  Until reload, this is equivalent to INSN_P.  */
2919
 
2920
int
2921
active_insn_p (rtx insn)
2922
{
2923
  return (CALL_P (insn) || JUMP_P (insn)
2924
          || (NONJUMP_INSN_P (insn)
2925
              && (! reload_completed
2926
                  || (GET_CODE (PATTERN (insn)) != USE
2927
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
2928
}
2929
 
2930
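/* Return the next insn after INSN that does real work in the sense of
   active_insn_p above, or 0 if there is none.  This routine does not
   look inside SEQUENCEs.  */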
rtx
2931
next_active_insn (rtx insn)
2932
{
2933
  while (insn)
2934
    {
2935
      insn = NEXT_INSN (insn);
2936
      if (insn == 0 || active_insn_p (insn))
2937
        break;
2938
    }
2939
 
2940
  return insn;
2941
}
2942
 
2943
/* Find the last insn before INSN that really does something.  This routine
2944
   does not look inside SEQUENCEs.  Until reload has completed, this is the
2945
   same as prev_real_insn.  */
2946
 
2947
rtx
2948
prev_active_insn (rtx insn)
2949
{
2950
  while (insn)
2951
    {
2952
      insn = PREV_INSN (insn);
2953
      if (insn == 0 || active_insn_p (insn))
2954
        break;
2955
    }
2956
 
2957
  return insn;
2958
}
2959
 
2960
/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
2961
 
2962
rtx
2963
next_label (rtx insn)
2964
{
2965
  while (insn)
2966
    {
2967
      insn = NEXT_INSN (insn);
2968
      if (insn == 0 || LABEL_P (insn))
2969
        break;
2970
    }
2971
 
2972
  return insn;
2973
}
2974
 
2975
/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */
2976
 
2977
rtx
2978
prev_label (rtx insn)
2979
{
2980
  while (insn)
2981
    {
2982
      insn = PREV_INSN (insn);
2983
      if (insn == 0 || LABEL_P (insn))
2984
        break;
2985
    }
2986
 
2987
  return insn;
2988
}
2989
 
2990
/* Return the last label to mark the same position as LABEL.  Return null
2991
   if LABEL itself is null.  */
2992
 
2993
rtx
2994
skip_consecutive_labels (rtx label)
2995
{
2996
  rtx insn;
2997
 
2998
  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
2999
    if (LABEL_P (insn))
3000
      label = insn;
3001
 
3002
  return label;
3003
}
3004
 
3005
#ifdef HAVE_cc0
3006
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3007
   and REG_CC_USER notes so we can find it.  */
3008
 
3009
void
3010
link_cc0_insns (rtx insn)
3011
{
3012
  rtx user = next_nonnote_insn (insn);
3013
 
3014
  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3015
    user = XVECEXP (PATTERN (user), 0, 0);
3016
 
3017
  REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3018
                                        REG_NOTES (user));
3019
  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3020
}
3021
 
3022
/* Return the next insn that uses CC0 after INSN, which is assumed to
3023
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3024
   applied to the result of this function should yield INSN).
3025
 
3026
   Normally, this is simply the next insn.  However, if a REG_CC_USER note
3027
   is present, it contains the insn that uses CC0.
3028
 
3029
   Return 0 if we can't find the insn.  */
3030
 
3031
rtx
3032
next_cc0_user (rtx insn)
3033
{
3034
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3035
 
3036
  if (note)
3037
    return XEXP (note, 0);
3038
 
3039
  insn = next_nonnote_insn (insn);
3040
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3041
    insn = XVECEXP (PATTERN (insn), 0, 0);
3042
 
3043
  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3044
    return insn;
3045
 
3046
  return 0;
3047
}
3048
 
3049
/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3050
   note, it is the previous insn.  */
3051
 
3052
rtx
3053
prev_cc0_setter (rtx insn)
3054
{
3055
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3056
 
3057
  if (note)
3058
    return XEXP (note, 0);
3059
 
3060
  insn = prev_nonnote_insn (insn);
3061
  gcc_assert (sets_cc0_p (PATTERN (insn)));
3062
 
3063
  return insn;
3064
}
3065
#endif
3066
 
3067
/* Increment the label uses for all labels present in rtx X.  */
3068
 
3069
static void
3070
mark_label_nuses (rtx x)
3071
{
3072
  enum rtx_code code;
3073
  int i, j;
3074
  const char *fmt;
3075
 
3076
  code = GET_CODE (x);
3077
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3078
    LABEL_NUSES (XEXP (x, 0))++;
3079
 
3080
  fmt = GET_RTX_FORMAT (code);
3081
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3082
    {
3083
      if (fmt[i] == 'e')
3084
        mark_label_nuses (XEXP (x, i));
3085
      else if (fmt[i] == 'E')
3086
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3087
          mark_label_nuses (XVECEXP (x, i, j));
3088
    }
3089
}
3090
 
3091
 
3092
/* Try splitting insns that can be split for better scheduling.
3093
   PAT is the pattern which might split.
3094
   TRIAL is the insn providing PAT.
3095
   LAST is nonzero if we should return the last insn of the sequence produced.
3096
 
3097
   If this routine succeeds in splitting, it returns the first or last
3098
   replacement insn depending on the value of LAST.  Otherwise, it
3099
   returns TRIAL.  If the insn to be returned can be split, it will be.  */
3100
 
3101
rtx
3102
try_split (rtx pat, rtx trial, int last)
3103
{
3104
  rtx before = PREV_INSN (trial);
3105
  rtx after = NEXT_INSN (trial);
3106
  int has_barrier = 0;
3107
  rtx tem;
3108
  rtx note, seq;
3109
  int probability;
3110
  rtx insn_last, insn;
3111
  int njumps = 0;
3112
 
3113
  if (any_condjump_p (trial)
3114
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3115
    split_branch_probability = INTVAL (XEXP (note, 0));
3116
  probability = split_branch_probability;
3117
 
3118
  seq = split_insns (pat, trial);
3119
 
3120
  split_branch_probability = -1;
3121
 
3122
  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3123
     We may need to handle this specially.  */
3124
  if (after && BARRIER_P (after))
3125
    {
3126
      has_barrier = 1;
3127
      after = NEXT_INSN (after);
3128
    }
3129
 
3130
  if (!seq)
3131
    return trial;
3132
 
3133
  /* Avoid infinite loop if any insn of the result matches
3134
     the original pattern.  */
3135
  insn_last = seq;
3136
  while (1)
3137
    {
3138
      if (INSN_P (insn_last)
3139
          && rtx_equal_p (PATTERN (insn_last), pat))
3140
        return trial;
3141
      if (!NEXT_INSN (insn_last))
3142
        break;
3143
      insn_last = NEXT_INSN (insn_last);
3144
    }
3145
 
3146
  /* Mark labels.  */
3147
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3148
    {
3149
      if (JUMP_P (insn))
3150
        {
3151
          mark_jump_label (PATTERN (insn), insn, 0);
3152
          njumps++;
3153
          if (probability != -1
3154
              && any_condjump_p (insn)
3155
              && !find_reg_note (insn, REG_BR_PROB, 0))
3156
            {
3157
              /* We can preserve the REG_BR_PROB notes only if exactly
3158
                 one jump is created, otherwise the machine description
3159
                 is responsible for this step using
3160
                 split_branch_probability variable.  */
3161
              gcc_assert (njumps == 1);
3162
              REG_NOTES (insn)
3163
                = gen_rtx_EXPR_LIST (REG_BR_PROB,
3164
                                     GEN_INT (probability),
3165
                                     REG_NOTES (insn));
3166
            }
3167
        }
3168
    }
3169
 
3170
  /* If we are splitting a CALL_INSN, look for the CALL_INSN
3171
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3172
  if (CALL_P (trial))
3173
    {
3174
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3175
        if (CALL_P (insn))
3176
          {
3177
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3178
            while (*p)
3179
              p = &XEXP (*p, 1);
3180
            *p = CALL_INSN_FUNCTION_USAGE (trial);
3181
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3182
          }
3183
    }
3184
 
3185
  /* Copy notes, particularly those related to the CFG.  */
3186
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3187
    {
3188
      switch (REG_NOTE_KIND (note))
3189
        {
3190
        case REG_EH_REGION:
3191
          insn = insn_last;
3192
          while (insn != NULL_RTX)
3193
            {
3194
              if (CALL_P (insn)
3195
                  || (flag_non_call_exceptions && INSN_P (insn)
3196
                      && may_trap_p (PATTERN (insn))))
3197
                REG_NOTES (insn)
3198
                  = gen_rtx_EXPR_LIST (REG_EH_REGION,
3199
                                       XEXP (note, 0),
3200
                                       REG_NOTES (insn));
3201
              insn = PREV_INSN (insn);
3202
            }
3203
          break;
3204
 
3205
        case REG_NORETURN:
3206
        case REG_SETJMP:
3207
          insn = insn_last;
3208
          while (insn != NULL_RTX)
3209
            {
3210
              if (CALL_P (insn))
3211
                REG_NOTES (insn)
3212
                  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3213
                                       XEXP (note, 0),
3214
                                       REG_NOTES (insn));
3215
              insn = PREV_INSN (insn);
3216
            }
3217
          break;
3218
 
3219
        case REG_NON_LOCAL_GOTO:
3220
          insn = insn_last;
3221
          while (insn != NULL_RTX)
3222
            {
3223
              if (JUMP_P (insn))
3224
                REG_NOTES (insn)
3225
                  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3226
                                       XEXP (note, 0),
3227
                                       REG_NOTES (insn));
3228
              insn = PREV_INSN (insn);
3229
            }
3230
          break;
3231
 
3232
        default:
3233
          break;
3234
        }
3235
    }
3236
 
3237
  /* If there are LABELS inside the split insns, increment the
3238
     usage count so we don't delete the label.  */
3239
  if (NONJUMP_INSN_P (trial))
3240
    {
3241
      insn = insn_last;
3242
      while (insn != NULL_RTX)
3243
        {
3244
          if (NONJUMP_INSN_P (insn))
3245
            mark_label_nuses (PATTERN (insn));
3246
 
3247
          insn = PREV_INSN (insn);
3248
        }
3249
    }
3250
 
3251
  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3252
 
3253
  delete_insn (trial);
3254
  if (has_barrier)
3255
    emit_barrier_after (tem);
3256
 
3257
  /* Recursively call try_split for each new insn created; by the
3258
     time control returns here that insn will be fully split, so
3259
     set LAST and continue from the insn after the one returned.
3260
     We can't use next_active_insn here since AFTER may be a note.
3261
     Ignore deleted insns, which can occur if not optimizing.  */
3262
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3263
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
3264
      tem = try_split (PATTERN (tem), tem, 1);
3265
 
3266
  /* Return either the first or the last insn, depending on which was
3267
     requested.  */
3268
  return last
3269
    ? (after ? PREV_INSN (after) : last_insn)
3270
    : NEXT_INSN (before);
3271
}
3272
 
3273
/* Make and return an INSN rtx, initializing all its slots.
3274
   Store PATTERN in the pattern slot.  */
3275
 
3276
rtx
3277
make_insn_raw (rtx pattern)
3278
{
3279
  rtx insn;
3280
 
3281
  insn = rtx_alloc (INSN);
3282
 
3283
  INSN_UID (insn) = cur_insn_uid++;
3284
  PATTERN (insn) = pattern;
3285
  INSN_CODE (insn) = -1;
3286
  LOG_LINKS (insn) = NULL;
3287
  REG_NOTES (insn) = NULL;
3288
  INSN_LOCATOR (insn) = 0;
3289
  BLOCK_FOR_INSN (insn) = NULL;
3290
 
3291
#ifdef ENABLE_RTL_CHECKING
3292
  if (insn
3293
      && INSN_P (insn)
3294
      && (returnjump_p (insn)
3295
          || (GET_CODE (insn) == SET
3296
              && SET_DEST (insn) == pc_rtx)))
3297
    {
3298
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3299
      debug_rtx (insn);
3300
    }
3301
#endif
3302
 
3303
  return insn;
3304
}
3305
 
3306
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3307
 
3308
rtx
3309
make_jump_insn_raw (rtx pattern)
3310
{
3311
  rtx insn;
3312
 
3313
  insn = rtx_alloc (JUMP_INSN);
3314
  INSN_UID (insn) = cur_insn_uid++;
3315
 
3316
  PATTERN (insn) = pattern;
3317
  INSN_CODE (insn) = -1;
3318
  LOG_LINKS (insn) = NULL;
3319
  REG_NOTES (insn) = NULL;
3320
  JUMP_LABEL (insn) = NULL;
3321
  INSN_LOCATOR (insn) = 0;
3322
  BLOCK_FOR_INSN (insn) = NULL;
3323
 
3324
  return insn;
3325
}
3326
 
3327
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3328
 
3329
static rtx
3330
make_call_insn_raw (rtx pattern)
3331
{
3332
  rtx insn;
3333
 
3334
  insn = rtx_alloc (CALL_INSN);
3335
  INSN_UID (insn) = cur_insn_uid++;
3336
 
3337
  PATTERN (insn) = pattern;
3338
  INSN_CODE (insn) = -1;
3339
  LOG_LINKS (insn) = NULL;
3340
  REG_NOTES (insn) = NULL;
3341
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3342
  INSN_LOCATOR (insn) = 0;
3343
  BLOCK_FOR_INSN (insn) = NULL;
3344
 
3345
  return insn;
3346
}
3347
 
3348
/* Add INSN to the end of the doubly-linked list.
3349
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3350
 
3351
void
3352
add_insn (rtx insn)
3353
{
3354
  PREV_INSN (insn) = last_insn;
3355
  NEXT_INSN (insn) = 0;
3356
 
3357
  if (NULL != last_insn)
3358
    NEXT_INSN (last_insn) = insn;
3359
 
3360
  if (NULL == first_insn)
3361
    first_insn = insn;
3362
 
3363
  last_insn = insn;
3364
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after)
{
  rtx next = NEXT_INSN (after);
  basic_block bb;

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        bb->flags |= BB_DIRTY;
      /* Should not happen as the first insn in the BB is always
         either a NOTE or a LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && (!NOTE_P (insn)
              || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_before (rtx insn, rtx before)
{
  rtx prev = PREV_INSN (before);
  basic_block bb;

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (before)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (before)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        bb->flags |= BB_DIRTY;
      /* Should not happen as the first insn in the BB is always either a
         NOTE or a LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || (NOTE_P (insn)
                      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}

/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        bb->flags |= BB_DIRTY;
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting the
             whole basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}

/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
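
/* Usage sketch, with assumptions flagged: callers usually build
   CALL_FUSAGE with use_reg (from expr.c) while loading argument
   registers, then hang it on the emitted call.  The register chosen
   below is purely illustrative:

        rtx call_fusage = NULL_RTX;
        use_reg (&call_fusage, gen_rtx_REG (Pmode, 0));
        add_function_usage_to (call_insn, call_fusage);
*/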

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}
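
/* A common rollback idiom built on delete_insns_since: remember the
   current last insn, try an expansion, and discard everything emitted
   if it did not work out.  expand_something is a hypothetical helper:

        rtx last = get_last_insn ();
        if (!expand_something ())
          delete_insns_since (last);
*/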

/* This function is deprecated; please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      bb->flags |= BB_DIRTY;

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          bb2->flags |= BB_DIRTY;
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          set_block_for_insn (x, bb);
    }
}

/* Return the line note insn preceding INSN.  */

static rtx
find_line_note (rtx insn)
{
  if (no_line_numbers)
    return 0;

  for (; insn; insn = PREV_INSN (insn))
    if (NOTE_P (insn)
        && NOTE_LINE_NUMBER (insn) >= 0)
      break;

  return insn;
}


/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in heavily fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (int subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
#ifndef USE_MAPPED_LOCATION
  NOTE_SOURCE_FILE (note) = 0;
#endif
  NOTE_LINE_NUMBER (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;

  add_insn_before (note, before);
  return note;
}

/* Helper for emit_insn_after; handles lists of instructions
   efficiently.  */

static rtx emit_insn_after_1 (rtx, rtx);

static rtx
emit_insn_after_1 (rtx first, rtx after)
{
  rtx last;
  rtx after_after;
  basic_block bb;

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      bb->flags |= BB_DIRTY;
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          set_block_for_insn (last, bb);
      if (!BARRIER_P (last))
        set_block_for_insn (last, bb);
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;
  return last;
}

/* Make X be output after the insn AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}

/* Similar to emit_insn_after, except that line notes are to be inserted so
   as to act as if this insn were at FROM.  */

void
emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
{
  rtx from_line = find_line_note (from);
  rtx after_line = find_line_note (after);
  rtx insn = emit_insn_after (x, after);

  if (from_line)
    emit_note_copy_after (from_line, after);

  if (after_line)
    emit_note_copy_after (after_line, insn);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_after (last, after);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (int subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
#ifndef USE_MAPPED_LOCATION
  NOTE_SOURCE_FILE (note) = 0;
#endif
  NOTE_LINE_NUMBER (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}

/* Emit a copy of note ORIG after the insn AFTER.  */

rtx
emit_note_copy_after (rtx orig, rtx after)
{
  rtx note;

  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
    {
      cur_insn_uid++;
      return 0;
    }

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
  NOTE_DATA (note) = NOTE_DATA (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn_after (note, after);
  return note;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_insn_after_noloc (pattern, after);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX || !loc)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_insn_before_noloc (pattern, before);
}
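
/* Note on the *_setloc wrappers above: emit_insn_before simply reuses
   the locator of the insn it is inserting in front of.  A hedged
   sketch, where before_insn is some existing INSN in the chain and
   pattern is any insn pattern built by the caller:

        emit_insn_before (pattern, before_insn);

   Every active insn emitted this way that lacks a locator inherits
   INSN_LOCATOR (before_insn).  */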

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
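
/* Caller-side sketch: most expanders build a pattern and hand it to
   emit_insn.  For a simple register copy, with dest_reg and src_reg
   being REG rtx's supplied by the caller:

        emit_insn (gen_rtx_SET (VOIDmode, dest_reg, src_reg));
*/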

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Make a line-numbering NOTE insn for LOCATION and add it to the end
   of the doubly-linked list, but only if line numbers are desired for
   debugging info and it doesn't match the previous one.  */

rtx
emit_line_note (location_t location)
{
  rtx note;

#ifdef USE_MAPPED_LOCATION
  if (location == last_location)
    return NULL_RTX;
#else
  if (location.file && last_location.file
      && !strcmp (location.file, last_location.file)
      && location.line == last_location.line)
    return NULL_RTX;
#endif
  last_location = location;

  if (no_line_numbers)
    {
      cur_insn_uid++;
      return NULL_RTX;
    }

#ifdef USE_MAPPED_LOCATION
  note = emit_note ((int) location);
#else
  note = emit_note (location.line);
  NOTE_SOURCE_FILE (note) = location.file;
#endif

  return note;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  if (NOTE_LINE_NUMBER (orig) >= 0 && no_line_numbers)
    {
      cur_insn_uid++;
      return NULL_RTX;
    }

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_LINE_NUMBER (note) = NOTE_LINE_NUMBER (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE with type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (int note_no)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_LINE_NUMBER (note) = note_no;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Cause the next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
#ifdef USE_MAPPED_LOCATION
  last_location = -1;
#else
  last_location.line = -1;
#endif
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one *useful* set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    {
      XEXP (note, 0) = datum;
      return note;
    }

  REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
  return REG_NOTES (insn);
}
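
/* Usage sketch: after expanding a multi-insn computation, a caller
   can record the value the destination is known to hold, so that
   later passes may substitute it.  insn and value are the caller's:

        set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
*/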

/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    default:
      gcc_unreachable ();
    }
}

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc (sizeof (struct sequence_stack));

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  first_insn = first;
  last_insn = last;
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = first_insn;
  top->last = last_insn;

  end_sequence ();
}

/* After emitting to a sequence, restore the previously saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
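
/* The canonical pairing, as already outlined before
   emit_insn_before_noloc above: emit into a detached sequence, grab
   the list with get_insns *before* popping, then emit it for real:

        rtx seq;
        start_sequence ();
        ... emit the new instructions ...
        seq = get_insns ();
        end_sequence ();
        emit_insn (seq);
*/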

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (struct emit_status *es)
{
  rtx *ptr = es->x_regno_reg_rtx;
  ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
         a LABEL_REF, it isn't sharable.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
          && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
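
/* Typical use (exactly what emit_copy_of_insn_after below does):
   duplicate an existing insn's pattern when re-emitting it elsewhere:

        new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);
*/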

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  struct function *f = cfun;

  f->emit = ggc_alloc (sizeof (struct emit_status));
  first_insn = NULL;
  last_insn = NULL;
  cur_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  f->emit->regno_pointer_align
    = ggc_alloc_cleared (f->emit->regno_pointer_align_length
                         * sizeof (unsigned char));

  regno_reg_rtx
    = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          static_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs (f->emit);

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
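
/* Usage sketch: build an element vector and let gen_rtx_CONST_VECTOR
   collapse it to a shared constant when possible.  For an all-zero
   vector mode this returns CONST0_RTX (mode):

        rtvec v = rtvec_alloc (GET_MODE_NUNITS (mode));
        int i;
        for (i = 0; i < GET_MODE_NUNITS (mode); i++)
          RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));
        x = gen_rtx_CONST_VECTOR (mode, v);
*/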
5083
 
5084
/* Create some permanent unique rtl objects shared between all functions.
5085
   LINE_NUMBERS is nonzero if line numbers are to be generated.  */
5086
 
5087
void
5088
init_emit_once (int line_numbers)
5089
{
5090
  int i;
5091
  enum machine_mode mode;
5092
  enum machine_mode double_mode;
5093
 
5094
  /* We need reg_raw_mode, so initialize the modes now.  */
5095
  init_reg_modes_once ();
5096
 
5097
  /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5098
     tables.  */
5099
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5100
                                    const_int_htab_eq, NULL);
5101
 
5102
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5103
                                       const_double_htab_eq, NULL);
5104
 
5105
  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5106
                                    mem_attrs_htab_eq, NULL);
5107
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5108
                                    reg_attrs_htab_eq, NULL);
5109
 
5110
  no_line_numbers = ! line_numbers;
5111
 
5112
  /* Compute the word and byte modes.  */
5113
 
5114
  byte_mode = VOIDmode;
5115
  word_mode = VOIDmode;
5116
  double_mode = VOIDmode;
5117
 
5118
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5119
       mode != VOIDmode;
5120
       mode = GET_MODE_WIDER_MODE (mode))
5121
    {
5122
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5123
          && byte_mode == VOIDmode)
5124
        byte_mode = mode;
5125
 
5126
      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5127
          && word_mode == VOIDmode)
5128
        word_mode = mode;
5129
    }
5130
 
5131
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5132
       mode != VOIDmode;
5133
       mode = GET_MODE_WIDER_MODE (mode))
5134
    {
5135
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5136
          && double_mode == VOIDmode)
5137
        double_mode = mode;
5138
    }
5139
 
5140
  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5141
 
5142
  /* Assign register numbers to the globally defined register rtx.
5143
     This must be done at runtime because the register number field
5144
     is in a union and some compilers can't initialize unions.  */
5145
 
5146
  pc_rtx = gen_rtx_PC (VOIDmode);
5147
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
5148
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5149
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5150
  if (hard_frame_pointer_rtx == 0)
5151
    hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5152
                                          HARD_FRAME_POINTER_REGNUM);
5153
  if (arg_pointer_rtx == 0)
5154
    arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5155
  virtual_incoming_args_rtx =
5156
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5157
  virtual_stack_vars_rtx =
5158
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5159
  virtual_stack_dynamic_rtx =
5160
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5161
  virtual_outgoing_args_rtx =
5162
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5163
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5164
 
5165
  /* Initialize RTL for commonly used hard registers.  These are
5166
     copied into regno_reg_rtx as we begin to compile each function.  */
5167
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5168
    static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5169
 
5170
#ifdef INIT_EXPANDERS
5171
  /* This is to initialize {init|mark|free}_machine_status before the first
5172
     call to push_function_context_to.  This is needed by the Chill front
5173
     end which calls push_function_context_to before the first call to
5174
     init_function_start.  */
5175
  INIT_EXPANDERS;
5176
#endif
5177
 
5178
  /* Create the unique rtx's for certain rtx codes and operand values.  */
5179
 
5180
  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5181
     tries to use these variables.  */
5182
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5183
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
5184
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5185
 
5186
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5187
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5188
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5189
  else
5190
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5191
 
5192
  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
5193
  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
5194
  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
5195
  REAL_VALUE_FROM_INT (dconst3,   3,  0, double_mode);
5196
  REAL_VALUE_FROM_INT (dconst10, 10,  0, double_mode);
5197
  REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5198
  REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5199
 
5200
  dconsthalf = dconst1;
5201
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);

  /* Initialize mathematical constants for constant folding builtins.
     These constants need to be specified to at least 160 bits of
     precision.  */
  real_from_string (&dconstpi,
    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      REAL_VALUE_TYPE *r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
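
  /* Editorial note (illustrative, not part of the original source):
     after this loop, const_tiny_rtx[i][(int) M] caches the constant i
     in mode M for i in {0, 1, 2}; e.g. const_tiny_rtx[1][(int) SImode]
     is (const_int 1), and const_tiny_rtx[0][(int) DFmode] is the
     CONST_DOUBLE for 0.0.  */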

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

#ifdef STATIC_CHAIN_REGNUM
  static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);

#ifdef STATIC_CHAIN_INCOMING_REGNUM
  if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
    static_chain_incoming_rtx
      = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
  else
#endif
    static_chain_incoming_rtx = static_chain_rtx;
#endif

#ifdef STATIC_CHAIN
  static_chain_rtx = STATIC_CHAIN;

#ifdef STATIC_CHAIN_INCOMING
  static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
#else
  static_chain_incoming_rtx = static_chain_rtx;
#endif
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new;
  rtx note1, note2, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
      CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new), new, 0);

  INSN_LOCATOR (new) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info, which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL, since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL)
      {
        if (GET_CODE (link) == EXPR_LIST)
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
        else
          REG_NOTES (new)
            = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
                                              XEXP (link, 0),
                                              REG_NOTES (new)));
      }

  /* Fix the libcall sequences: the REG_RETVAL note on the copy must point
     at the first insn of the copied sequence (the one carrying the
     matching REG_LIBCALL note), and that REG_LIBCALL note must point back
     at the copy.  */
  if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
    {
      rtx p = new;
      while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
        p = PREV_INSN (p);
      XEXP (note1, 0) = p;
      XEXP (note2, 0) = new;
    }
  INSN_CODE (new) = INSN_CODE (insn);
  return new;
}
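
/* Editorial sketch (illustrative, not part of the original source):
   a pass that needs a duplicate of INSN immediately following it could
   write

     rtx copy = emit_copy_of_insn_after (insn, insn);

   COPY is a fresh insn (new uid) whose pattern is copied from INSN,
   with the same INSN_CODE and locator, and with all REG_NOTES except
   REG_LABEL duplicated as described above.  */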

static GTY((deletable)) rtx
hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
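
/* Editorial example (illustrative, not part of the original source):
   gen_hard_reg_clobber (SImode, 0) returns (clobber (reg:SI 0)), and a
   second call with the same MODE and REGNO returns the identical rtx,
   since the result is memoized in the GC-managed hard_reg_clobbers
   table above.  */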

#include "gt-emit-rtl.h"
