/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
|
| 37 |
|
|
#include "system.h"
|
| 38 |
|
|
#include "coretypes.h"
|
| 39 |
|
|
#include "tm.h"
|
| 40 |
|
|
#include "toplev.h"
|
| 41 |
|
|
#include "rtl.h"
|
| 42 |
|
|
#include "tree.h"
|
| 43 |
|
|
#include "tm_p.h"
|
| 44 |
|
|
#include "flags.h"
|
| 45 |
|
|
#include "function.h"
|
| 46 |
|
|
#include "expr.h"
|
| 47 |
|
|
#include "regs.h"
|
| 48 |
|
|
#include "hard-reg-set.h"
|
| 49 |
|
|
#include "hashtab.h"
|
| 50 |
|
|
#include "insn-config.h"
|
| 51 |
|
|
#include "recog.h"
|
| 52 |
|
|
#include "real.h"
|
| 53 |
|
|
#include "fixed-value.h"
|
| 54 |
|
|
#include "bitmap.h"
|
| 55 |
|
|
#include "basic-block.h"
|
| 56 |
|
|
#include "ggc.h"
|
| 57 |
|
|
#include "debug.h"
|
| 58 |
|
|
#include "langhooks.h"
|
| 59 |
|
|
#include "tree-pass.h"
|
| 60 |
|
|
#include "df.h"
|
| 61 |
|
|
#include "params.h"
|
| 62 |
|
|
#include "target.h"
|
| 63 |
|
|
|
| 64 |
|
|
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most), these are
   the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define first_insn (crtl->emit.x_first_insn)
#define last_insn (crtl->emit.x_last_insn)
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace,
	       enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
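
/* Illustrative example: with all-default attributes the early return
   above fires.  Assuming a target where STRICT_ALIGNMENT is 0,

     get_mem_attrs (0, NULL_TREE, 0, 0, BITS_PER_UNIT, 0, SImode)

   returns 0, matching what the MEM_* accessors report when no
   mem_attrs structure is attached to a MEM.  */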

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (long) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (reg_attrs));
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
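
/* Illustrative example: the sharing above makes pointer equality a
   valid way to compare CONST_INTs.  Small values come from the
   const_int_rtx cache, so GEN_INT (0) is const0_rtx, and repeated
   calls such as GEN_INT (123456) return one and the same rtx once the
   value has been entered into const_int_htab.  */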

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
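
/* Illustrative example: unlike GEN_INT, gen_int_mode first truncates
   and sign-extends C to the width of MODE.  On a target with an 8-bit
   QImode, gen_int_mode (0xff, QImode) therefore yields (const_int -1),
   the canonical sign-extended form, whereas GEN_INT (0xff) would
   produce (const_int 255), which is not canonical for QImode.  */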

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the
	value of the integer fits into HOST_WIDE_INT anyway (i.e., i1
	consists only of copies of the sign bit, and the signs of i0 and
	i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
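
/* Worked example of the three cases above, assuming (purely for
   concreteness) a host with 32-bit HOST_WIDE_INT:

     immed_double_const (5, 0, SImode)  -> (const_int 5)       [case 1]
     immed_double_const (5, 0, DImode)  -> (const_int 5)       [case 2:
					   I1 is all copies of I0's sign bit]
     immed_double_const (0, 1, DImode)  -> (const_double 0 1)  [case 3]  */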

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
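
/* Illustrative example: thanks to the sharing above, outside of reload

     gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM)

   returns the unique frame_pointer_rtx object (as long as the frame
   pointer is still needed), so explicit frame-pointer references can
   later be recognized by simple pointer identity.  */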

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked the mode/offset alignment, we only
     have to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
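
/* A few concrete instances of the rules above, assuming a 64-bit
   target where word_mode is DImode (so the word_mode escape hatch does
   not cover SImode):

     (subreg:DI (reg:DF) 0)  -- valid: a word_mode subreg, and the size
				of the float is preserved anyway.
     (subreg:SI (reg:DF) 0)  -- invalid: a float-mode subreg that
				changes size.
     (subreg:QI (reg:HI) 1)  -- valid only on big-endian targets, where
				byte 1 is the lowpart of the HImode
				value.  */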

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}


/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
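
/* Usage sketch: the two spellings below build the same two-element
   vector; gen_rtvec is the varargs form, gen_rtvec_v (defined next)
   takes an array:

     rtvec v1 = gen_rtvec (2, x, y);

     rtx tmp[2] = { x, y };
     rtvec v2 = gen_rtvec_v (2, tmp);  */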

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
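
/* Worked example, with 4-byte SImode and 8-byte DImode:
   byte_lowpart_offset (SImode, DImode) is 0 on little-endian targets
   and 4 on big-endian ones; the paradoxical direction
   byte_lowpart_offset (DImode, SImode) is correspondingly 0 or -4.  */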

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase the stack alignment estimate, because it might be spilled
     to the stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary
	 parts, which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
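
/* Illustrative example: while generating_concat_p is nonzero, asking
   for a complex pseudo such as SCmode does not create a single REG;
   gen_reg_rtx instead returns something like

     (concat:SC (reg:SF 100) (reg:SF 101))

   so the real and imaginary parts can be allocated independently.  */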

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from the memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current
   function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current
   function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were
   used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label X was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP explicitly, in case EXP
   is a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode,
		   rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
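
/* Worked example: subreg_lowpart_offset (QImode, SImode) with a 4-byte
   SImode is 0 on a little-endian target and 3 on a big-endian one --
   the difference of 3 bytes all lies within a single word.  */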

/* Return the offset in bytes needed to get the OUTERMODE high part
   of a value in mode INNERMODE, stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
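
/* Worked example, the mirror image of the one above: with 4-byte
   SImode and 8-byte DImode, subreg_highpart_offset (SImode, DImode)
   is 4 on little-endian targets and 0 on big-endian ones, i.e. the
   high part sits at the higher address only when the target is
   little-endian.  */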

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in
   most cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
|
| 1401 |
|
|
|
| 1402 |
|
|
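/* Illustrative example (added, not from the original source): on a
   32-bit target, operand_subword (op, 1, 1, DImode) asks for the second
   word of a double-word operand.  For a pseudo register this comes back
   from simplify_gen_subreg as (subreg:SI (reg:DI ...) 4); for a MEM it
   becomes a word_mode MEM whose address is offset by UNITS_PER_WORD
   bytes, validated as described above.  */
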
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Return 1 if the two MEM_EXPR trees EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
           || !CONST_INT_P (MEM_OFFSET (mem))
           || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
               < align))
         return -1;
       else
         return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !host_integerp (byte_offset, 1)
              || !host_integerp (bit_offset, 1))
            return -1;

          offset += tree_low_cst (byte_offset, 1);
          offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

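/* Worked example (added, not from the original file): suppose MEM_EXPR
   is a decl with DECL_ALIGN of at least 32 bits and the walk above
   accumulates a byte offset of 6.  With align = 32, the return value is
   6 & (32 / 8 - 1) = 2, i.e. XEXP (mem, 0) minus 2 bytes is known to be
   32-bit aligned.  */
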
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  alias_set_type alias = MEM_ALIAS_SET (ref);
  tree expr = MEM_EXPR (ref);
  rtx offset = MEM_OFFSET (ref);
  rtx size = MEM_SIZE (ref);
  unsigned int align = MEM_ALIGN (ref);
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == ALIGN_INDIRECT_REF
      || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else
    if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
      {
        if (integer_zerop (TREE_OPERAND (t, 1)))
          /* We don't know anything about the alignment.  */
          align = BITS_PER_UNIT;
        else
          align = tree_low_cst (TREE_OPERAND (t, 1), 1);
      }

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* We may look through structure-like accesses for the purposes of
         examining TREE_THIS_NOTRAP, but not array-like accesses.  */
      base = t;
      while (TREE_CODE (base) == COMPONENT_REF
             || TREE_CODE (base) == REALPART_EXPR
             || TREE_CODE (base) == IMAGPART_EXPR
             || TREE_CODE (base) == BIT_FIELD_REF)
        base = TREE_OPERAND (base, 0);

      if (DECL_P (base))
        {
          if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
            MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
          else
            MEM_NOTRAP_P (ref) = 1;
        }
      else
        MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);

      base = get_base_address (base);
      if (base && DECL_P (base)
          && TREE_READONLY (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        {
          tree base_type = TREE_TYPE (base);
          gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
                      || DECL_ARTIFICIAL (base));
          MEM_READONLY_P (ref) = 1;
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set (t))
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          size = (DECL_SIZE_UNIT (t)
                  && host_integerp (DECL_SIZE_UNIT (t), 1)
                  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
          align = DECL_ALIGN (t);
          align_computed = true;
        }

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
        {
          align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
          align = CONSTANT_ALIGNMENT (t, align);
#endif
          align_computed = true;
        }

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
         such as the word offset in the structure that might be modified.
         But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
               && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
        {
          expr = t;
          offset = const0_rtx;
          apply_bitpos = bitpos;
          /* ??? Any reason the field size would be different than
             the size we got from the type?  */
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2))
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
                  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
                  align = DECL_ALIGN (t2);
                  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
                    align = aoff;
                  align_computed = true;
                  offset = GEN_INT (ioff);
                  apply_bitpos = bitpos;
                }
            }
          else if (TREE_CODE (t2) == COMPONENT_REF)
            {
              expr = t2;
              offset = NULL;
              if (host_integerp (off_tree, 1))
                {
                  offset = GEN_INT (tree_low_cst (off_tree, 1));
                  apply_bitpos = bitpos;
                }
              /* ??? Any reason the field size would be different than
                 the size we got from the type?  */
            }
          else if (flag_argument_noalias > 1
                   && (INDIRECT_REF_P (t2))
                   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
            {
              expr = t2;
              offset = NULL;
            }
        }

      /* If this is a Fortran indirect argument reference, record the
         parameter decl.  */
      else if (flag_argument_noalias > 1
               && (INDIRECT_REF_P (t))
               && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
        {
          expr = t;
          offset = NULL;
        }

      if (!align_computed && !INDIRECT_REF_P (t))
        {
          unsigned int obj_align
            = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
          align = MAX (align, obj_align);
        }
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
        size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
    {
      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
         we're overlapping.  */
      offset = NULL;
      expr = NULL;
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
                     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}

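/* Illustrative note on BITPOS (added, not from the original source): if
   the attributes above were computed from a T whose access point lies
   BITPOS bits past where REF points, say bitpos = 32, the apply_bitpos
   code subtracts 32 / BITS_PER_UNIT = 4 bytes from the recorded
   MEM_OFFSET and grows MEM_SIZE by the same amount, so the attributes
   remain consistent once the outstanding offset is applied later.  */
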
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
#ifdef ENABLE_CHECKING
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
#endif

  MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem),
                                   MEM_ALIGN (mem), addrspace, GET_MODE (mem));
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), MEM_SIZE (mem), align,
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  MEM_ATTRS (mem)
    = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
                     MEM_SIZE (mem), MEM_ALIGN (mem),
                     MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, rtx offset)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, rtx size)
{
  MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
                                   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
                                   MEM_ADDR_SPACE (mem), GET_MODE (mem));
}

/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
  enum machine_mode mmode = GET_MODE (new_rtx);
  unsigned int align;

  size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
  align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (MEM_ATTRS (memref) == 0
          || (MEM_EXPR (memref) == NULL
              && MEM_OFFSET (memref) == NULL
              && MEM_SIZE (memref) == size
              && MEM_ALIGN (memref) == align))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
                     MEM_ADDR_SPACE (memref), mmode);

  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and caller is responsible for adjusting MEMREF base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  rtx memoffset = MEM_OFFSET (memref);
  rtx size = 0;
  unsigned int memalign = MEM_ALIGN (memref);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  int pbits;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (XEXP (addr, 1), offset));
      else
        addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (memoffset)
    memoffset = GEN_INT (offset + INTVAL (memoffset));

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    memalign
      = MIN (memalign,
             (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);

  /* We can compute the size in a number of ways.  */
  if (GET_MODE (new_rtx) != BLKmode)
    size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
  else if (MEM_SIZE (memref))
    size = plus_constant (MEM_SIZE (memref), -offset);

  MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
                                       memoffset, size, memalign, as,
                                       GET_MODE (new_rtx));

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}

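/* Worked example for the alignment update above (added for
   illustration): offset & -offset isolates the lowest set bit of OFFSET,
   so an offset of 12 gives 4 and hence 4 * BITS_PER_UNIT = 32 bits; a
   MEM previously known to be 64-bit aligned is demoted to 32-bit
   alignment.  Callers typically reach this function through the
   adjust_address and adjust_address_nv macros, which supply ADJUST = 1
   with VALIDATE of 1 and 0 respectively.  */
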
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  addr_space_t as = MEM_ADDR_SPACE (memref);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  MEM_ATTRS (new_rtx)
    = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
                     MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
                     as, GET_MODE (new_rtx));
  return new_rtx;
}

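/* Illustrative example (added, not from the original file): if OFFSET is
   a variable index known to be a multiple of 8, the caller passes
   pow2 = 8 and the new MEM keeps at most 8 * BITS_PER_UNIT = 64 bits of
   alignment; with no knowledge at all, pow2 = 1 demotes the alignment to
   a single byte.  */
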
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  tree expr = MEM_EXPR (new_rtx);
  rtx memoffset = MEM_OFFSET (new_rtx);
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! memoffset)
    expr = NULL_TREE;

  while (expr)
    {
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (expr, 1);
          tree offset = component_ref_field_offset (expr);

          if (! DECL_SIZE_UNIT (field))
            {
              expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && INTVAL (memoffset) >= 0)
            break;

          if (! host_integerp (offset, 1))
            {
              expr = NULL_TREE;
              break;
            }

          expr = TREE_OPERAND (expr, 0);
          memoffset
            = (GEN_INT (INTVAL (memoffset)
                        + tree_low_cst (offset, 1)
                        + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                           / BITS_PER_UNIT)));
        }
      /* Similarly for the decl.  */
      else if (DECL_P (expr)
               && DECL_SIZE_UNIT (expr)
               && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
               && (! memoffset || INTVAL (memoffset) >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          expr = NULL_TREE;
          break;
        }
    }

  if (! expr)
    memoffset = NULL_RTX;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */

  MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
                                       MEM_ALIGN (new_rtx),
                                       MEM_ADDR_SPACE (new_rtx), mode);

  return new_rtx;
}

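/* Illustrative example (added): a target without byte loads can widen a
   QImode reference into the surrounding word, e.g.

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and then mask or shift the loaded word.  Because the wider access may
   touch neighbouring objects, the result deliberately carries alias
   set 0, as the code above arranges.  */
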
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  TREE_THIS_NOTRAP (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
                                  NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  alias_set_type alias;
  rtx addr, offset;
  tree expr;

  expr = get_spill_slot_decl (true);
  alias = MEM_ALIAS_SET (DECL_RTL (expr));

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  offset = const0_rtx;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    offset = XEXP (addr, 1);

  MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
                                   MEM_SIZE (mem), MEM_ALIGN (mem),
                                   ADDR_SPACE_GENERIC, GET_MODE (mem));
  MEM_NOTRAP_P (mem) = 1;
}

/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
                             NULL, label_num++, NULL);
}

/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  first_insn = first;
  last_insn = last;
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",                            /* name */
  NULL,                                 /* gate */
  unshare_all_rtl,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};

/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      return;
      /* SCRATCHes must be shared because they represent distinct values.  */
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
  return;
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

void
verify_rtl_sharing (void)
{
  rtx p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (GET_CODE (PATTERN (p)) == SEQUENCE)
          {
            int i;
            rtx q, sequence = PATTERN (p);

            for (i = 0; i < XVECLEN (sequence, 0); i++)
              {
                q = XVECEXP (sequence, 0, i);
                gcc_assert (INSN_P (q));
                reset_used_flags (PATTERN (q));
                reset_used_flags (REG_NOTES (q));
              }
          }
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        verify_rtx_sharing (PATTERN (p), p);
        verify_rtx_sharing (REG_NOTES (p), p);
      }
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

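/* Usage note (added for illustration): the mark bits must be clear
   before this is called, so a typical unsharing sequence mirrors what
   unshare_all_rtl_again does above -- first reset_used_flags over every
   pattern and note, then set_used_flags on rtl that must stay shared
   (such as DECL_RTLs), and only then copy_rtx_if_shared over the insn
   stream.  */
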
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
|
| 2719 |
|
|
to look for shared sub-parts. */
|
| 2720 |
|
|
|
| 2721 |
|
|
void
|
| 2722 |
|
|
reset_used_flags (rtx x)
|
| 2723 |
|
|
{
|
| 2724 |
|
|
int i, j;
|
| 2725 |
|
|
enum rtx_code code;
|
| 2726 |
|
|
const char *format_ptr;
|
| 2727 |
|
|
int length;
|
| 2728 |
|
|
|
| 2729 |
|
|
/* Repeat is used to turn tail-recursion into iteration. */
|
| 2730 |
|
|
repeat:
|
| 2731 |
|
|
if (x == 0)
|
| 2732 |
|
|
return;
|
| 2733 |
|
|
|
| 2734 |
|
|
code = GET_CODE (x);
|
| 2735 |
|
|
|
| 2736 |
|
|
/* These types may be freely shared so we needn't do any resetting
|
| 2737 |
|
|
for them. */
|
| 2738 |
|
|
|
| 2739 |
|
|
switch (code)
|
| 2740 |
|
|
{
|
| 2741 |
|
|
case REG:
|
| 2742 |
|
|
case DEBUG_EXPR:
|
| 2743 |
|
|
case VALUE:
|
| 2744 |
|
|
case CONST_INT:
|
| 2745 |
|
|
case CONST_DOUBLE:
|
| 2746 |
|
|
case CONST_FIXED:
|
| 2747 |
|
|
case CONST_VECTOR:
|
| 2748 |
|
|
case SYMBOL_REF:
|
| 2749 |
|
|
case CODE_LABEL:
|
| 2750 |
|
|
case PC:
|
| 2751 |
|
|
case CC0:
|
| 2752 |
|
|
return;
|
| 2753 |
|
|
|
| 2754 |
|
|
case DEBUG_INSN:
|
| 2755 |
|
|
case INSN:
|
| 2756 |
|
|
case JUMP_INSN:
|
| 2757 |
|
|
case CALL_INSN:
|
| 2758 |
|
|
case NOTE:
|
| 2759 |
|
|
case LABEL_REF:
|
| 2760 |
|
|
case BARRIER:
|
| 2761 |
|
|
/* The chain of insns is not being copied. */
|
| 2762 |
|
|
return;
|
| 2763 |
|
|
|
| 2764 |
|
|
default:
|
| 2765 |
|
|
break;
|
| 2766 |
|
|
}
|
| 2767 |
|
|
|
| 2768 |
|
|
RTX_FLAG (x, used) = 0;
|
| 2769 |
|
|
|
| 2770 |
|
|
format_ptr = GET_RTX_FORMAT (code);
|
| 2771 |
|
|
length = GET_RTX_LENGTH (code);
|
| 2772 |
|
|
|
| 2773 |
|
|
for (i = 0; i < length; i++)
|
| 2774 |
|
|
{
|
| 2775 |
|
|
switch (*format_ptr++)
|
| 2776 |
|
|
{
|
| 2777 |
|
|
case 'e':
|
| 2778 |
|
|
if (i == length-1)
|
| 2779 |
|
|
{
|
| 2780 |
|
|
x = XEXP (x, i);
|
| 2781 |
|
|
goto repeat;
|
| 2782 |
|
|
}
|
| 2783 |
|
|
reset_used_flags (XEXP (x, i));
|
| 2784 |
|
|
break;
|
| 2785 |
|
|
|
| 2786 |
|
|
case 'E':
|
| 2787 |
|
|
for (j = 0; j < XVECLEN (x, i); j++)
|
| 2788 |
|
|
reset_used_flags (XVECEXP (x, i, j));
|
| 2789 |
|
|
break;
|
| 2790 |
|
|
}
|
| 2791 |
|
|
}
|
| 2792 |
|
|
}
|
| 2793 |
|
|
|
/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any marking
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = 1;

  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          set_used_flags (XEXP (x, i));
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            set_used_flags (XVECEXP (x, i, j));
          break;
        }
    }
}

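/* Illustrative sketch (added for exposition; copy_rtx_if_shared is
   defined earlier in this file): the USED bits form a small protocol.
   A caller that wants to unshare an expression typically does

	reset_used_flags (x);		... clear stale bits
	x = copy_rtx_if_shared (x);	... copy anything reached twice
	reset_used_flags (x);		... leave the bits clear for others

   set_used_flags is the remaining piece: it pre-marks rtl that must be
   treated as already seen, forcing copy_rtx_if_shared to copy any later
   reference to it.  */
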
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}

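/* Usage sketch (added for exposition, not original code; the variable
   names are hypothetical): a typical caller is about to expand a
   multi-insn operation whose destination TARGET may also be mentioned
   in the source X, so it does

	x = make_safe_from (x, target);
	... emit insns that repeatedly store into TARGET ...

   Afterwards X either was already safe or names a fresh pseudo holding
   the old value, so the stores to TARGET cannot change it.  */
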
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the first insn of the current sequence or current function.  */

rtx
get_insns (void)
{
  return first_insn;
}

/* Specify a new insn as the first in the chain.  */

void
set_first_insn (rtx insn)
{
  gcc_assert (!PREV_INSN (insn));
  first_insn = insn;
}

/* Return the last insn emitted in current sequence or current function.  */

rtx
get_last_insn (void)
{
  return last_insn;
}

/* Specify a new insn as the last in the chain.  */

void
set_last_insn (rtx insn)
{
  gcc_assert (!NEXT_INSN (insn));
  last_insn = insn;
}

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;

  if (last_insn)
    return last_insn;
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = first_insn;

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0, 0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = last_insn;

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = XVECEXP (PATTERN (insn), 0,
                            XVECLEN (PATTERN (insn), 0) - 1);
        }
    }

  return insn;
}

/* Return a number larger than any instruction's uid in this function.  */

int
get_max_uid (void)
{
  return cur_insn_uid;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

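/* Worked example (illustrative numbers, added for exposition): debug
   insns draw uids below MIN_NONDEBUG_INSN_UID first and only then
   share the main counter (see make_debug_insn_raw below).  With
   MIN_NONDEBUG_INSN_UID of 20, cur_insn_uid of 120 and
   cur_debug_insn_uid having reached 35, 35 - 20 = 15 of the main uids
   went to debug insns, giving 120 - 35 = 85 real insns; with no
   spill (cur_debug_insn_uid still <= 20) the count is 120 - 20 = 100.
   Either way the result does not depend on -g.  */
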
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL_RTX;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx
next_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}

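/* Canonical walk (an illustrative fragment, not original code): passes
   typically drive the helpers above from a loop such as

	for (insn = get_insns (); insn != 0; insn = next_real_insn (insn))
	  if (INSN_P (insn))
	    ... process one real INSN, CALL_INSN or JUMP_INSN ...

   next_real_insn never yields notes, labels or barriers, and the
   INSN_P test also covers the very first insn in the chain.  */
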
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
        break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return null
   if LABEL itself is null.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}

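/* Example (illustrative, added for exposition): given the chain

	L1: CODE_LABEL
	L2: CODE_LABEL
	i3: INSN

   skip_consecutive_labels (L1) returns L2, the last label marking the
   same position, which is what callers redirecting jumps typically
   want to target.  */
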
#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif

#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
        return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return -1;
}
#endif

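/* Example use (see also the REG_INC handling in try_split below):
   find_auto_inc is written as a for_each_rtx callback, so a caller
   scans a whole pattern with

	if (for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
	  ... PATTERN (insn) auto-increments REG somewhere ...

   A return of 1 stops the walk with "found"; the -1 return prunes the
   walk below an autoinc expression whose address is some other reg.  */
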
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}

/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* Update the debug information for the CALL_INSN.  */
            if (flag_enable_icf_debug)
              (*debug_hooks->copy_call_info) (trial, insn);
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

#ifdef AUTO_INC_DEC
        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
                add_reg_note (insn, REG_INC, reg);
            }
          break;
#endif

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : last_insn)
    : NEXT_INSN (before);
}

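/* Caller sketch (an illustrative fragment; split_all_insns in recog.c
   is the usual driver): because try_split returns TRIAL unchanged when
   nothing was split, a caller can compare the result against the
   original insn to see whether a replacement happened.

	rtx last = try_split (PATTERN (insn), insn, 1);
	if (last != insn)
	  ... INSN was deleted; the fully split sequence ends at LAST,
	      so scanning resumes after LAST ...  */
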
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = last_insn;
  NEXT_INSN (insn) = 0;

  if (NULL != last_insn)
    NEXT_INSN (last_insn) = insn;

  if (NULL == first_insn)
    first_insn = insn;

  last_insn = insn;
}

/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (last_insn == after)
    last_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (after == stack->last)
          {
            stack->last = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   bb from before.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
        }
    }
  else if (first_insn == before)
    first_insn = insn;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (before == stack->first)
          {
            stack->first = insn;
            break;
          }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}

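/* Relationship note (added for exposition): when no explicit position
   is given, the plain emit_insn entry point (later in this file)
   essentially boils down to

	insn = make_insn_raw (pattern);
	add_insn (insn);

   i.e. allocate the INSN and link it at the end of the current
   sequence; the _before/_after variants use add_insn_before and
   add_insn_after instead so SEQUENCEs and BB boundaries stay
   consistent.  */
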
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}


/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx sequence = PATTERN (prev);
          NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
        }
    }
  else if (first_insn == insn)
    first_insn = next;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->first)
          {
            stack->first = next;
            break;
          }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (last_insn == insn)
    last_insn = prev;
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
        if (insn == stack->last)
          {
            stack->last = prev;
            break;
          }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}

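/* Contrast (added for exposition): set_insn_deleted above leaves the
   insn in the chain, demoted to a NOTE_INSN_DELETED that later passes
   simply skip, whereas remove_insn splices the insn out of the chain
   entirely, fixing up any enclosing SEQUENCE and the owning block's
   BB_HEAD/BB_END, without freeing the rtx itself.  */
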
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    first_insn = 0;
  else
    NEXT_INSN (from) = 0;
  last_insn = from;
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (last_insn == to)
    last_insn = PREV_INSN (from);
  if (first_insn == from)
    first_insn = NEXT_INSN (to);

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == last_insn)
    last_insn = to;
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}

/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  rtx last = before;
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  rtx insn, last = NULL_RTX;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  rtx last = NULL_RTX, insn;

  gcc_assert (before);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn_before (insn, before, NULL);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_before (last, before, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}

/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == last_insn)
    last_insn = last;

  return last;
}

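/* Note on the helper above (added for exposition): the first loop
   deliberately stops once NEXT_INSN (last) is null, i.e. at the final
   insn of the list, and the trailing if then handles that final insn,
   so every non-barrier insn in FIRST..LAST gets its block set and is
   rescanned exactly once before the list is spliced in after AFTER.  */
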
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}


/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

| 4475 |
|
|
/* Make an insn of code BARRIER
|
| 4476 |
|
|
and output it after the insn AFTER. */
|
| 4477 |
|
|
|
| 4478 |
|
|
rtx
|
| 4479 |
|
|
emit_barrier_after (rtx after)
|
| 4480 |
|
|
{
|
| 4481 |
|
|
rtx insn = rtx_alloc (BARRIER);
|
| 4482 |
|
|
|
| 4483 |
|
|
INSN_UID (insn) = cur_insn_uid++;
|
| 4484 |
|
|
|
| 4485 |
|
|
add_insn_after (insn, after, NULL);
|
| 4486 |
|
|
return insn;
|
| 4487 |
|
|
}
|
| 4488 |
|
|
|
| 4489 |
|
|
/* Emit the label LABEL after the insn AFTER. */
|
| 4490 |
|
|
|
| 4491 |
|
|
rtx
|
| 4492 |
|
|
emit_label_after (rtx label, rtx after)
|
| 4493 |
|
|
{
|
| 4494 |
|
|
/* This can be called twice for the same label
|
| 4495 |
|
|
as a result of the confusion that follows a syntax error!
|
| 4496 |
|
|
So make it harmless. */
|
| 4497 |
|
|
if (INSN_UID (label) == 0)
|
| 4498 |
|
|
{
|
| 4499 |
|
|
INSN_UID (label) = cur_insn_uid++;
|
| 4500 |
|
|
add_insn_after (label, after, NULL);
|
| 4501 |
|
|
}
|
| 4502 |
|
|
|
| 4503 |
|
|
return label;
|
| 4504 |
|
|
}
|
| 4505 |
|
|
|
| 4506 |
|
|
/* Emit a note of subtype SUBTYPE after the insn AFTER. */
|
| 4507 |
|
|
|
| 4508 |
|
|
rtx
|
| 4509 |
|
|
emit_note_after (enum insn_note subtype, rtx after)
|
| 4510 |
|
|
{
|
| 4511 |
|
|
rtx note = rtx_alloc (NOTE);
|
| 4512 |
|
|
INSN_UID (note) = cur_insn_uid++;
|
| 4513 |
|
|
NOTE_KIND (note) = subtype;
|
| 4514 |
|
|
BLOCK_FOR_INSN (note) = NULL;
|
| 4515 |
|
|
memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
|
| 4516 |
|
|
add_insn_after (note, after, NULL);
|
| 4517 |
|
|
return note;
|
| 4518 |
|
|
}
|
| 4519 |
|
|
|
| 4520 |
|
|
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
|
| 4521 |
|
|
rtx
|
| 4522 |
|
|
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
|
| 4523 |
|
|
{
|
| 4524 |
|
|
rtx last = emit_insn_after_noloc (pattern, after, NULL);
|
| 4525 |
|
|
|
| 4526 |
|
|
if (pattern == NULL_RTX || !loc)
|
| 4527 |
|
|
return last;
|
| 4528 |
|
|
|
| 4529 |
|
|
after = NEXT_INSN (after);
|
| 4530 |
|
|
while (1)
|
| 4531 |
|
|
{
|
| 4532 |
|
|
if (active_insn_p (after) && !INSN_LOCATOR (after))
|
| 4533 |
|
|
INSN_LOCATOR (after) = loc;
|
| 4534 |
|
|
if (after == last)
|
| 4535 |
|
|
break;
|
| 4536 |
|
|
after = NEXT_INSN (after);
|
| 4537 |
|
|
}
|
| 4538 |
|
|
return last;
|
| 4539 |
|
|
}
|
| 4540 |
|
|
|
| 4541 |
|
|
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
|
| 4542 |
|
|
rtx
|
| 4543 |
|
|
emit_insn_after (rtx pattern, rtx after)
|
| 4544 |
|
|
{
|
| 4545 |
|
|
rtx prev = after;
|
| 4546 |
|
|
|
| 4547 |
|
|
while (DEBUG_INSN_P (prev))
|
| 4548 |
|
|
prev = PREV_INSN (prev);
|
| 4549 |
|
|
|
| 4550 |
|
|
if (INSN_P (prev))
|
| 4551 |
|
|
return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
|
| 4552 |
|
|
else
|
| 4553 |
|
|
return emit_insn_after_noloc (pattern, after, NULL);
|
| 4554 |
|
|
}
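
/* A minimal usage sketch of the function above (the helper name and
   operands are hypothetical, not part of this file): emit a register
   copy right after AFTER, letting emit_insn_after inherit the source
   location from the surrounding insns.  */
#if 0
static rtx
example_copy_reg_after (rtx dest, rtx src, rtx after)
{
  /* gen_rtx_SET builds a (set dest src) pattern; emit_insn_after wraps
     it in an INSN and links it into the chain right after AFTER.  */
  return emit_insn_after (gen_rtx_SET (VOIDmode, dest, src), after);
}
#endif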

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_debug_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_debug_insn_after_noloc (pattern, after);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to
   BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_debug_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_debug_insn_before_noloc (pattern, before);
}

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = last_insn;
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
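
/* Sketch of how the clobber/use emitters above are typically paired
   (the helper is hypothetical): a CLOBBER tells data-flow that REG's
   old value is dead, and a trailing USE keeps REG live through a
   region that builds its value piecemeal.  */
#if 0
static void
example_bracket_reg_lifetime (rtx reg)
{
  emit_clobber (reg);  /* REG's previous value is undefined from here.  */
  /* ... emit insns that build up REG piecemeal ... */
  emit_use (reg);      /* Keep REG live up to this point.  */
}
#endif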

/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, change its DATUM instead of
   adding a new note.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one *useful* set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         They serve no useful purpose and break eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
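
/* A minimal sketch of typical usage (the helper and the constant are
   hypothetical): after emitting an insn whose destination is known to
   equal some value, record that fact with a REG_EQUAL note so later
   passes may substitute the value.  */
#if 0
static void
example_record_known_value (rtx dest, rtx src)
{
  rtx insn = emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}
#endif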

/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
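
/* Illustrative sketch (the helper is hypothetical): emit dispatches on
   the shape of the pattern, so a (set (pc) ...) is classified as a
   JUMP_INSN and, being unconditional, is followed by a BARRIER
   automatically.  */
#if 0
static void
example_emit_unconditional_jump (void)
{
  rtx label = gen_label_rtx ();
  /* classify_insn sees SET_DEST == pc_rtx and picks JUMP_INSN; emit
     then appends a barrier because the jump is unconditional.  */
  emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)));
}
#endif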

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = GGC_NEW (struct sequence_stack);

  tem->next = seq_stack;
  tem->first = first_insn;
  tem->last = last_insn;

  seq_stack = tem;

  first_insn = 0;
  last_insn = 0;
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  first_insn = first;
  last_insn = last;
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  first_insn = first;
  last_insn = last;
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  first_insn = top->first;
  last_insn = top->last;
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = first_insn;
  top->last = last_insn;

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  first_insn = tem->first;
  last_insn = tem->last;
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
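
/* The canonical pairing of start_sequence/get_insns/end_sequence, as a
   sketch (the helper name and operands are hypothetical): build insns
   on a detached chain, grab them with get_insns before end_sequence,
   then hand the chain to the caller for insertion wherever needed.  */
#if 0
static rtx
example_build_detached_sequence (rtx dest, rtx src)
{
  rtx seq;

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
  seq = get_insns ();   /* Must precede end_sequence.  */
  end_sequence ();
  return seq;           /* Caller may pass this to emit_insn etc.  */
}
#endif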

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
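
/* Sketch of the intended use (the helper is hypothetical): duplicate an
   insn's pattern so the copy can be emitted elsewhere without sharing
   SCRATCHes or ASM_OPERANDS vectors with the original.  This mirrors
   what emit_copy_of_insn_after below does for plain INSNs.  */
#if 0
static rtx
example_duplicate_after (rtx insn, rtx after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}
#endif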

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  first_insn = NULL;
  last_insn = NULL;
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = GGC_NEWVEC (rtx, crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          static_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero
   vector when all elements are zero, and the one vector when all
   elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
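
/* Illustrative sketch (the helper and the choice of V4SImode are
   hypothetical and target-dependent): a vector whose elements are all
   zero collapses to the shared CONST0_RTX for the vector mode instead
   of allocating a fresh CONST_VECTOR.  */
#if 0
static rtx
example_zero_vector (void)
{
  rtvec v = rtvec_alloc (4);
  int i;

  for (i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;
  /* Returns the shared CONST0_RTX (V4SImode) rather than a new rtx.  */
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}
#endif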
|
| 5666 |
|
|
|
| 5667 |
|
|
/* Initialise global register information required by all functions. */
|
| 5668 |
|
|
|
| 5669 |
|
|
void
|
| 5670 |
|
|
init_emit_regs (void)
|
| 5671 |
|
|
{
|
| 5672 |
|
|
int i;
|
| 5673 |
|
|
|
| 5674 |
|
|
/* Reset register attributes */
|
| 5675 |
|
|
htab_empty (reg_attrs_htab);
|
| 5676 |
|
|
|
| 5677 |
|
|
/* We need reg_raw_mode, so initialize the modes now. */
|
| 5678 |
|
|
init_reg_modes_target ();
|
| 5679 |
|
|
|
| 5680 |
|
|
/* Assign register numbers to the globally defined register rtx. */
|
| 5681 |
|
|
pc_rtx = gen_rtx_PC (VOIDmode);
|
| 5682 |
|
|
cc0_rtx = gen_rtx_CC0 (VOIDmode);
|
| 5683 |
|
|
stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
|
| 5684 |
|
|
frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
|
| 5685 |
|
|
hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
|
| 5686 |
|
|
arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
|
| 5687 |
|
|
virtual_incoming_args_rtx =
|
| 5688 |
|
|
gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
|
| 5689 |
|
|
virtual_stack_vars_rtx =
|
| 5690 |
|
|
gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
|
| 5691 |
|
|
virtual_stack_dynamic_rtx =
|
| 5692 |
|
|
gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
|
| 5693 |
|
|
virtual_outgoing_args_rtx =
|
| 5694 |
|
|
gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
|
| 5695 |
|
|
virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
|
| 5696 |
|
|
|
| 5697 |
|
|
/* Initialize RTL for commonly used hard registers. These are
|
| 5698 |
|
|
copied into regno_reg_rtx as we begin to compile each function. */
|
| 5699 |
|
|
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
|
| 5700 |
|
|
static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
|
| 5701 |
|
|
|
| 5702 |
|
|
#ifdef RETURN_ADDRESS_POINTER_REGNUM
|
| 5703 |
|
|
return_address_pointer_rtx
|
| 5704 |
|
|
= gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
|
| 5705 |
|
|
#endif
|
| 5706 |
|
|
|
| 5707 |
|
|
if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
|
| 5708 |
|
|
pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
|
| 5709 |
|
|
else
|
| 5710 |
|
|
pic_offset_table_rtx = NULL_RTX;
|
| 5711 |
|
|
}
|
| 5712 |
|
|
|
| 5713 |
|
|
/* Create some permanent unique rtl objects shared between all functions. */
|
| 5714 |
|
|
|
| 5715 |
|
|
void
|
| 5716 |
|
|
init_emit_once (void)
|
| 5717 |
|
|
{
|
| 5718 |
|
|
int i;
|
| 5719 |
|
|
enum machine_mode mode;
|
| 5720 |
|
|
enum machine_mode double_mode;
|
| 5721 |
|
|
|
| 5722 |
|
|
/* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
|
| 5723 |
|
|
hash tables. */
|
| 5724 |
|
|
const_int_htab = htab_create_ggc (37, const_int_htab_hash,
|
| 5725 |
|
|
const_int_htab_eq, NULL);
|
| 5726 |
|
|
|
| 5727 |
|
|
const_double_htab = htab_create_ggc (37, const_double_htab_hash,
|
| 5728 |
|
|
const_double_htab_eq, NULL);
|
| 5729 |
|
|
|
| 5730 |
|
|
const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
|
| 5731 |
|
|
const_fixed_htab_eq, NULL);
|
| 5732 |
|
|
|
| 5733 |
|
|
mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
|
| 5734 |
|
|
mem_attrs_htab_eq, NULL);
|
| 5735 |
|
|
reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
|
| 5736 |
|
|
reg_attrs_htab_eq, NULL);
|
| 5737 |
|
|
|
| 5738 |
|
|
/* Compute the word and byte modes. */
|
| 5739 |
|
|
|
| 5740 |
|
|
byte_mode = VOIDmode;
|
| 5741 |
|
|
word_mode = VOIDmode;
|
| 5742 |
|
|
double_mode = VOIDmode;
|
| 5743 |
|
|
|
| 5744 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
|
| 5745 |
|
|
mode != VOIDmode;
|
| 5746 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5747 |
|
|
{
|
| 5748 |
|
|
if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
|
| 5749 |
|
|
&& byte_mode == VOIDmode)
|
| 5750 |
|
|
byte_mode = mode;
|
| 5751 |
|
|
|
| 5752 |
|
|
if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
|
| 5753 |
|
|
&& word_mode == VOIDmode)
|
| 5754 |
|
|
word_mode = mode;
|
| 5755 |
|
|
}
|
| 5756 |
|
|
|
| 5757 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
|
| 5758 |
|
|
mode != VOIDmode;
|
| 5759 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5760 |
|
|
{
|
| 5761 |
|
|
if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
|
| 5762 |
|
|
&& double_mode == VOIDmode)
|
| 5763 |
|
|
double_mode = mode;
|
| 5764 |
|
|
}
|
| 5765 |
|
|
|
| 5766 |
|
|
ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
|
| 5767 |
|
|
|
| 5768 |
|
|
#ifdef INIT_EXPANDERS
|
| 5769 |
|
|
/* This is to initialize {init|mark|free}_machine_status before the first
|
| 5770 |
|
|
call to push_function_context_to. This is needed by the Chill front
|
| 5771 |
|
|
end which calls push_function_context_to before the first call to
|
| 5772 |
|
|
init_function_start. */
|
| 5773 |
|
|
INIT_EXPANDERS;
|
| 5774 |
|
|
#endif
|
| 5775 |
|
|
|
| 5776 |
|
|
/* Create the unique rtx's for certain rtx codes and operand values. */
|
| 5777 |
|
|
|
| 5778 |
|
|
/* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
|
| 5779 |
|
|
tries to use these variables. */
|
| 5780 |
|
|
for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
|
| 5781 |
|
|
const_int_rtx[i + MAX_SAVED_CONST_INT] =
|
| 5782 |
|
|
gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
|
| 5783 |
|
|
|
| 5784 |
|
|
if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
|
| 5785 |
|
|
&& STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
|
| 5786 |
|
|
const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
|
| 5787 |
|
|
else
|
| 5788 |
|
|
const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
|
| 5789 |
|
|
|
| 5790 |
|
|
REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
|
| 5791 |
|
|
REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
|
| 5792 |
|
|
REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
|
| 5793 |
|
|
|
| 5794 |
|
|
dconstm1 = dconst1;
|
| 5795 |
|
|
dconstm1.sign = 1;
|
| 5796 |
|
|
|
| 5797 |
|
|
dconsthalf = dconst1;
|
| 5798 |
|
|
SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
|
| 5799 |
|
|
|
| 5800 |
|
|
for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
|
| 5801 |
|
|
{
|
| 5802 |
|
|
const REAL_VALUE_TYPE *const r =
|
| 5803 |
|
|
(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
|
| 5804 |
|
|
|
| 5805 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
|
| 5806 |
|
|
mode != VOIDmode;
|
| 5807 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5808 |
|
|
const_tiny_rtx[i][(int) mode] =
|
| 5809 |
|
|
CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
|
| 5810 |
|
|
|
| 5811 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
|
| 5812 |
|
|
mode != VOIDmode;
|
| 5813 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5814 |
|
|
const_tiny_rtx[i][(int) mode] =
|
| 5815 |
|
|
CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
|
| 5816 |
|
|
|
| 5817 |
|
|
const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
|
| 5818 |
|
|
|
| 5819 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
|
| 5820 |
|
|
mode != VOIDmode;
|
| 5821 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5822 |
|
|
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
|
| 5823 |
|
|
|
| 5824 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
|
| 5825 |
|
|
mode != VOIDmode;
|
| 5826 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5827 |
|
|
const_tiny_rtx[i][(int) mode] = GEN_INT (i);
|
| 5828 |
|
|
}
|
| 5829 |
|
|
|
| 5830 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
|
| 5831 |
|
|
mode != VOIDmode;
|
| 5832 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5833 |
|
|
{
|
| 5834 |
|
|
rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
|
| 5835 |
|
|
const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
|
| 5836 |
|
|
}
|
| 5837 |
|
|
|
| 5838 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
|
| 5839 |
|
|
mode != VOIDmode;
|
| 5840 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5841 |
|
|
{
|
| 5842 |
|
|
rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
|
| 5843 |
|
|
const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
|
| 5844 |
|
|
}
|
| 5845 |
|
|
|
| 5846 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
|
| 5847 |
|
|
mode != VOIDmode;
|
| 5848 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5849 |
|
|
{
|
| 5850 |
|
|
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
|
| 5851 |
|
|
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
|
| 5852 |
|
|
}
|
| 5853 |
|
|
|
| 5854 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
|
| 5855 |
|
|
mode != VOIDmode;
|
| 5856 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5857 |
|
|
{
|
| 5858 |
|
|
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
|
| 5859 |
|
|
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
|
| 5860 |
|
|
}
|
| 5861 |
|
|
|
| 5862 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
|
| 5863 |
|
|
mode != VOIDmode;
|
| 5864 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5865 |
|
|
{
|
| 5866 |
|
|
FCONST0(mode).data.high = 0;
|
| 5867 |
|
|
FCONST0(mode).data.low = 0;
|
| 5868 |
|
|
FCONST0(mode).mode = mode;
|
| 5869 |
|
|
const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
|
| 5870 |
|
|
FCONST0 (mode), mode);
|
| 5871 |
|
|
}
|
| 5872 |
|
|
|
| 5873 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
|
| 5874 |
|
|
mode != VOIDmode;
|
| 5875 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5876 |
|
|
{
|
| 5877 |
|
|
FCONST0(mode).data.high = 0;
|
| 5878 |
|
|
FCONST0(mode).data.low = 0;
|
| 5879 |
|
|
FCONST0(mode).mode = mode;
|
| 5880 |
|
|
const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
|
| 5881 |
|
|
FCONST0 (mode), mode);
|
| 5882 |
|
|
}
|
| 5883 |
|
|
|
| 5884 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
|
| 5885 |
|
|
mode != VOIDmode;
|
| 5886 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5887 |
|
|
{
|
| 5888 |
|
|
FCONST0(mode).data.high = 0;
|
| 5889 |
|
|
FCONST0(mode).data.low = 0;
|
| 5890 |
|
|
FCONST0(mode).mode = mode;
|
| 5891 |
|
|
const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
|
| 5892 |
|
|
FCONST0 (mode), mode);
|
| 5893 |
|
|
|
| 5894 |
|
|
/* We store the value 1. */
|
| 5895 |
|
|
FCONST1(mode).data.high = 0;
|
| 5896 |
|
|
FCONST1(mode).data.low = 0;
|
| 5897 |
|
|
FCONST1(mode).mode = mode;
|
| 5898 |
|
|
lshift_double (1, 0, GET_MODE_FBIT (mode),
|
| 5899 |
|
|
2 * HOST_BITS_PER_WIDE_INT,
|
| 5900 |
|
|
&FCONST1(mode).data.low,
|
| 5901 |
|
|
&FCONST1(mode).data.high,
|
| 5902 |
|
|
SIGNED_FIXED_POINT_MODE_P (mode));
|
| 5903 |
|
|
const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
|
| 5904 |
|
|
FCONST1 (mode), mode);
|
| 5905 |
|
|
}
|
| 5906 |
|
|
|
| 5907 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
|
| 5908 |
|
|
mode != VOIDmode;
|
| 5909 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5910 |
|
|
{
|
| 5911 |
|
|
FCONST0(mode).data.high = 0;
|
| 5912 |
|
|
FCONST0(mode).data.low = 0;
|
| 5913 |
|
|
FCONST0(mode).mode = mode;
|
| 5914 |
|
|
const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
|
| 5915 |
|
|
FCONST0 (mode), mode);
|
| 5916 |
|
|
|
| 5917 |
|
|
/* We store the value 1. */
|
| 5918 |
|
|
FCONST1(mode).data.high = 0;
|
| 5919 |
|
|
FCONST1(mode).data.low = 0;
|
| 5920 |
|
|
FCONST1(mode).mode = mode;
|
| 5921 |
|
|
lshift_double (1, 0, GET_MODE_FBIT (mode),
|
| 5922 |
|
|
2 * HOST_BITS_PER_WIDE_INT,
|
| 5923 |
|
|
&FCONST1(mode).data.low,
|
| 5924 |
|
|
&FCONST1(mode).data.high,
|
| 5925 |
|
|
SIGNED_FIXED_POINT_MODE_P (mode));
|
| 5926 |
|
|
const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
|
| 5927 |
|
|
FCONST1 (mode), mode);
|
| 5928 |
|
|
}
|
| 5929 |
|
|
|
| 5930 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
|
| 5931 |
|
|
mode != VOIDmode;
|
| 5932 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5933 |
|
|
{
|
| 5934 |
|
|
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
|
| 5935 |
|
|
}
|
| 5936 |
|
|
|
| 5937 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
|
| 5938 |
|
|
mode != VOIDmode;
|
| 5939 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5940 |
|
|
{
|
| 5941 |
|
|
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
|
| 5942 |
|
|
}
|
| 5943 |
|
|
|
| 5944 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
|
| 5945 |
|
|
mode != VOIDmode;
|
| 5946 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5947 |
|
|
{
|
| 5948 |
|
|
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
|
| 5949 |
|
|
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
|
| 5950 |
|
|
}
|
| 5951 |
|
|
|
| 5952 |
|
|
for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
|
| 5953 |
|
|
mode != VOIDmode;
|
| 5954 |
|
|
mode = GET_MODE_WIDER_MODE (mode))
|
| 5955 |
|
|
{
|
| 5956 |
|
|
const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
|
| 5957 |
|
|
const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
|
| 5958 |
|
|
}

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
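  /* Presumably const1_rtx only represents "true" in BImode when the
     target's STORE_FLAG_VALUE is 1, so no constant is cached
     otherwise.  */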
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

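  /* The copy must be entered in the chain with the emit routine
     matching its class (plain insn, jump, debug or call insn) so that
     it gets the right rtx code and per-class fields.  */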
  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
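
/* A usage sketch (hypothetical INSN variable, for illustration only):

       rtx copy = emit_copy_of_insn_after (insn, insn);

   links a fresh insn with the same pattern, notes and INSN_CODE into
   the chain immediately after INSN.  */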

static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
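
/* Return a shared CLOBBER expression of hard register REGNO in mode
   MODE, creating and caching it on first use.  The table above is
   GTY((deletable)) because its entries can always be recreated on
   demand.  */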
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
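
/* For instance, gen_hard_reg_clobber (SImode, 1) yields the rtl
   (clobber (reg:SI 1)); every later call with the same arguments
   returns the identical shared object, so callers must not modify the
   result in place.  */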

#include "gt-emit-rtl.h"