/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EXPR_H
#define GCC_EXPR_H

/* For inhibit_defer_pop */
#include "function.h"
/* For XEXP, GEN_INT, rtx_code */
#include "rtl.h"
/* For optimize_size */
#include "flags.h"
/* For host_integerp, tree_low_cst, fold_convert, size_binop, ssize_int,
   TREE_CODE, TYPE_SIZE, int_size_in_bytes, */
#include "tree.h"
/* For GET_MODE_BITSIZE, word_mode */
#include "machmode.h"

/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};

/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)

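/* Editor's note, an illustrative sketch rather than part of the original
   header: the two macros above are used as a bracketing pair around code
   during which pending stack pops must not be deferred, e.g.

     NO_DEFER_POP;
     ... emit insns that rely on an exact stack pointer ...
     OK_DEFER_POP;

   Because inhibit_defer_pop is a counter, such pairs may nest.  */
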
enum direction {none, upward, downward};

/* Structure to record the size of a sequence of arguments
   as the sum of a tree-expression and a constant.  This structure is
   also used to store offsets from the stack, which might be negative,
   so the variable part must be ssizetype, not sizetype.  */

struct args_size
{
  HOST_WIDE_INT constant;
  tree var;
};

/* Package up various arg related fields of struct args for
   locate_and_pad_parm.  */
struct locate_and_pad_arg_data
{
  /* Size of this argument on the stack, rounded up for any padding it
     gets.  If REG_PARM_STACK_SPACE is defined, then register parms are
     counted here, otherwise they aren't.  */
  struct args_size size;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Offset to the start of the stack slot.  Different from OFFSET
     if this arg pads downward.  */
  struct args_size slot_offset;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
  /* Which way we should pad this arg.  */
  enum direction where_pad;
  /* slot_offset is at least this aligned.  */
  unsigned int boundary;
};

/* Add the value of the tree INC to the `struct args_size' TO.  */

#define ADD_PARM_SIZE(TO, INC)                                  \
do {                                                            \
  tree inc = (INC);                                             \
  if (host_integerp (inc, 0))                                   \
    (TO).constant += tree_low_cst (inc, 0);                     \
  else if ((TO).var == 0)                                       \
    (TO).var = fold_convert (ssizetype, inc);                   \
  else                                                          \
    (TO).var = size_binop (PLUS_EXPR, (TO).var,                 \
                           fold_convert (ssizetype, inc));      \
} while (0)

#define SUB_PARM_SIZE(TO, DEC)                                  \
do {                                                            \
  tree dec = (DEC);                                             \
  if (host_integerp (dec, 0))                                   \
    (TO).constant -= tree_low_cst (dec, 0);                     \
  else if ((TO).var == 0)                                       \
    (TO).var = size_binop (MINUS_EXPR, ssize_int (0),           \
                           fold_convert (ssizetype, dec));      \
  else                                                          \
    (TO).var = size_binop (MINUS_EXPR, (TO).var,                \
                           fold_convert (ssizetype, dec));      \
} while (0)

/* Convert the implicit sum in a `struct args_size' into a tree
   of type ssizetype.  */
#define ARGS_SIZE_TREE(SIZE)                                            \
((SIZE).var == 0 ? ssize_int ((SIZE).constant)                          \
 : size_binop (PLUS_EXPR, fold_convert (ssizetype, (SIZE).var),         \
               ssize_int ((SIZE).constant)))

/* Convert the implicit sum in a `struct args_size' into an rtx.  */
#define ARGS_SIZE_RTX(SIZE)                                     \
((SIZE).var == 0 ? GEN_INT ((SIZE).constant)                    \
 : expand_normal (ARGS_SIZE_TREE (SIZE)))


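/* Editor's note, an illustrative sketch rather than part of the original
   header (the variable names are placeholders): a `struct args_size'
   accumulates a constant part and an optional ssizetype tree for the
   variable part, and the ARGS_SIZE_* macros read the sum back out:

     struct args_size sz;
     sz.constant = 0;
     sz.var = 0;
     ADD_PARM_SIZE (sz, TYPE_SIZE_UNIT (type));  // constant or variable size
     ADD_PARM_SIZE (sz, ssize_int (8));          // adds 8 to sz.constant
     size_rtx = ARGS_SIZE_RTX (sz);              // GEN_INT or an expanded sum

   TYPE_SIZE_UNIT is used here only as an example of a size tree to add.  */
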
/* This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;
  location_t location;
  tree type;
  tree op0, op1, op2;
} *sepops;

/* Functions from optabs.c, commonly used, and without need for the optabs
   tables:  */

/* Passed to expand_simple_binop and expand_binop to say which options
   to try to use if the requested operation can't be open-coded on the
   requisite mode.  Either OPTAB_LIB or OPTAB_LIB_WIDEN says try using
   a library call.  Either OPTAB_WIDEN or OPTAB_LIB_WIDEN says try
   using a wider mode.  OPTAB_MUST_WIDEN says try widening and don't
   try anything else.  */

enum optab_methods
{
  OPTAB_DIRECT,
  OPTAB_LIB,
  OPTAB_WIDEN,
  OPTAB_LIB_WIDEN,
  OPTAB_MUST_WIDEN
};

/* Generate code for a simple binary or unary operation.  "Simple" in
   this case means "can be unambiguously described by a (mode, code)
   pair and mapped to a single optab."  */
extern rtx expand_simple_binop (enum machine_mode, enum rtx_code, rtx,
                                rtx, rtx, int, enum optab_methods);
extern rtx expand_simple_unop (enum machine_mode, enum rtx_code, rtx, rtx,
                               int);

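/* Editor's note, an illustrative sketch rather than part of the original
   header (op0 and op1 are placeholder operands): emitting a word-mode
   addition through the optabs layer, allowing widening or a libcall when
   the target has no direct add pattern:

     rtx sum = expand_simple_binop (word_mode, PLUS, op0, op1,
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN);

   A NULL_RTX target lets the expander choose a fresh pseudo; the 0 is the
   unsignedp flag.  */
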
/* Report whether the machine description contains an insn which can
   perform the operation described by CODE and MODE.  */
extern int have_insn_for (enum rtx_code, enum machine_mode);

/* Emit code to make a call to a constant function or a library call.  */
extern void emit_libcall_block (rtx, rtx, rtx, rtx);

/* Create but don't emit one rtl instruction to perform certain operations.
   Modes must match; operands must meet the operation's predicates.
   Likewise for subtraction and for just copying.  */
extern rtx gen_add2_insn (rtx, rtx);
extern rtx gen_add3_insn (rtx, rtx, rtx);
extern rtx gen_sub2_insn (rtx, rtx);
extern rtx gen_sub3_insn (rtx, rtx, rtx);
extern rtx gen_move_insn (rtx, rtx);
extern int have_add2_insn (rtx, rtx);
extern int have_sub2_insn (rtx, rtx);

/* Emit a pair of rtl insns to compare two rtx's and to jump
   to a label if the comparison is true.  */
extern void emit_cmp_and_jump_insns (rtx, rtx, enum rtx_code, rtx,
                                     enum machine_mode, int, rtx);

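/* Editor's note, an illustrative sketch rather than part of the original
   header (x and label are placeholders): branching to LABEL when X compares
   equal to zero in SImode:

     emit_cmp_and_jump_insns (x, const0_rtx, EQ, NULL_RTX,
                              SImode, 0, label);

   The NULL_RTX is the SIZE operand, which matters only for BLKmode
   comparisons, and the 0 is the unsignedp flag.  */
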
/* Generate code to indirectly jump to a location given in the rtx LOC.  */
extern void emit_indirect_jump (rtx);

/* Generate a conditional trap instruction.  */
extern rtx gen_cond_trap (enum rtx_code, rtx, rtx, rtx);

#include "insn-config.h"

#ifdef HAVE_conditional_move
/* Emit a conditional move operation.  */
rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
                           rtx, rtx, enum machine_mode, int);

/* Return nonzero if the conditional move is supported.  */
int can_conditionally_move_p (enum machine_mode mode);

#endif
rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
                          rtx, rtx, enum machine_mode, int);

rtx expand_sync_operation (rtx, rtx, enum rtx_code);
rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx);
rtx expand_sync_lock_test_and_set (rtx, rtx, rtx);

rtx expand_atomic_exchange (rtx, rtx, rtx, enum memmodel);
rtx expand_atomic_load (rtx, rtx, enum memmodel);
rtx expand_atomic_store (rtx, rtx, enum memmodel, bool);
rtx expand_atomic_fetch_op (rtx, rtx, rtx, enum rtx_code, enum memmodel,
                            bool);
rtx expand_atomic_test_and_set (rtx, rtx, enum memmodel);
rtx expand_atomic_clear (rtx, enum memmodel);
void expand_atomic_thread_fence (enum memmodel);
void expand_atomic_signal_fence (enum memmodel);


/* Functions from expmed.c:  */

/* Arguments MODE, RTX: return an rtx for the negation of that value.
   May emit insns.  */
extern rtx negate_rtx (enum machine_mode, rtx);

/* Expand a logical AND operation.  */
extern rtx expand_and (enum machine_mode, rtx, rtx, rtx);

/* Emit a store-flag operation.  */
extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
                            int, int);

/* Like emit_store_flag, but always succeeds.  */
extern rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx,
                                  enum machine_mode, int, int);

/* Functions from builtins.c:  */
extern rtx expand_builtin (tree, rtx, rtx, enum machine_mode, int);
extern tree std_build_builtin_va_list (void);
extern tree std_fn_abi_va_list (tree);
extern tree std_canonical_va_list_type (tree);

extern void std_expand_builtin_va_start (tree, rtx);
extern rtx default_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
extern void expand_builtin_setjmp_setup (rtx, rtx);
extern void expand_builtin_setjmp_receiver (rtx);
extern rtx expand_builtin_saveregs (void);
extern void expand_builtin_trap (void);
extern rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);

/* Functions from expr.c:  */

/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (enum machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (enum machine_mode, enum machine_mode, rtx, int);

/* Emit code to move a block Y to a block X.  */

enum block_op_methods
{
  BLOCK_OP_NORMAL,
  BLOCK_OP_NO_LIBCALL,
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL
};

extern GTY(()) tree block_clear_fn;
extern void init_block_move_fn (const char *);
extern void init_block_clear_fn (const char *);

extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
extern rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
                                  unsigned int, HOST_WIDE_INT);
extern bool emit_storent_insn (rtx to, rtx from);

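/* Editor's note, an illustrative sketch rather than part of the original
   header (dest_mem, src_mem and size_rtx are placeholders): copying a block
   with the usual fallbacks (move by pieces, a movmem pattern, or a memcpy
   libcall):

     rtx ret = emit_block_move (dest_mem, src_mem, size_rtx, BLOCK_OP_NORMAL);

   BLOCK_OP_NO_LIBCALL forbids the library-call fallback, and
   BLOCK_OP_CALL_PARM is used when the copied block feeds an outgoing call
   argument.  */
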
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, enum machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, int);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, int);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, int);

/* Copy BLKmode object from a set of registers.  */
extern rtx copy_blkmode_from_reg (rtx, rtx, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, enum machine_mode);

extern rtx copy_blkmode_to_reg (enum machine_mode, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.  */
static inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}

/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
                                unsigned int, HOST_WIDE_INT);
/* The same, but always output a library call.  */
rtx set_storage_via_libcall (rtx, rtx, rtx, bool);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
                                    unsigned int, HOST_WIDE_INT);

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */
extern int can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

extern unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);

/* Return nonzero if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
                                rtx (*) (void *, HOST_WIDE_INT,
                                         enum machine_mode),
                                void *, unsigned int, bool);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy.
   Returns TO + LEN.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
                            rtx (*) (void *, HOST_WIDE_INT, enum machine_mode),
                            void *, unsigned int, bool, int);

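/* Editor's note, an illustrative sketch rather than part of the original
   header (dest_mem, str, len and align are placeholders): CONSTFUN is called
   back with CONSTFUNDATA, a byte offset and a mode, and must return the
   constant bytes at that offset as an rtx of that mode.
   builtin_strncpy_read_str, declared earlier in this header, is one such
   callback; storing a known string constant by pieces looks roughly like

     if (can_store_by_pieces (len, builtin_strncpy_read_str,
                              CONST_CAST (char *, str), align, false))
       store_by_pieces (dest_mem, len, builtin_strncpy_read_str,
                        CONST_CAST (char *, str), align, false, 0);

   The final 0 requests that DEST_MEM itself be returned; nonzero values ask
   for mempcpy/stpcpy-style end addresses instead.  */
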
/* Emit insns to set X from Y.  */
extern rtx emit_move_insn (rtx, rtx);

/* Emit insns to set X from Y, with no frills.  */
extern rtx emit_move_insn_1 (rtx, rtx);

extern rtx emit_move_complex_push (enum machine_mode, rtx, rtx);
extern rtx emit_move_complex_parts (rtx, rtx);

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, int, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern void emit_push_insn (rtx, enum machine_mode, tree, rtx, unsigned int,
                            int, rtx, int, rtx, rtx, int, rtx);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Work horses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, enum machine_mode,
                             enum expand_modifier, rtx *);
extern rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
                               enum expand_modifier, rtx *);
extern rtx expand_expr_real_2 (sepops, rtx, enum machine_mode,
                               enum expand_modifier);

/* Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.  */
static inline rtx
expand_expr (tree exp, rtx target, enum machine_mode mode,
             enum expand_modifier modifier)
{
  return expand_expr_real (exp, target, mode, modifier, NULL);
}

static inline rtx
expand_normal (tree exp)
{
  return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL);
}

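/* Editor's note, an illustrative sketch rather than part of the original
   header (exp and target are placeholders): the common ways to turn a tree
   into RTL are

     rtx val = expand_normal (exp);

   or, when a preferred target and mode are known,

     rtx val = expand_expr (exp, target, TYPE_MODE (TREE_TYPE (exp)),
                            EXPAND_NORMAL);

   The other expand_modifier values (see the enum near the top of this file)
   relax which forms of rtx may be returned.  */
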
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */
extern void init_pending_stack_adjust (void);

/* Discard any pending stack adjustment.  */
extern void discard_pending_stack_adjust (void);

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */
extern void clear_pending_stack_adjust (void);

/* Pop any previously-pushed arguments that have not been popped yet.  */
extern void do_pending_stack_adjust (void);

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */
extern tree string_constant (tree, tree *);

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.  */
extern void jumpifnot (tree, rtx, int);
extern void jumpifnot_1 (enum tree_code, tree, tree, rtx, int);

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
extern void jumpif (tree, rtx, int);
extern void jumpif_1 (enum tree_code, tree, tree, rtx, int);

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.  */
extern void do_jump (tree, rtx, rtx, int);
extern void do_jump_1 (enum tree_code, tree, tree, rtx, rtx, int);

extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
                                     enum machine_mode, rtx, rtx, rtx, int);

/* Two different ways of generating switch statements.  */
extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx);
extern int try_tablejump (tree, tree, tree, tree, rtx, rtx);

/* Functions from alias.c */
#include "alias.h"


/* rtl.h and tree.h were included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */
extern HOST_WIDE_INT int_expr_size (tree);

/* Return an rtx that refers to the value returned by a function
   in its original home.  This becomes invalid if any more code is emitted.  */
extern rtx hard_function_value (const_tree, const_tree, const_tree, int);

extern rtx prepare_call_address (tree, rtx, rtx, rtx *, int, int);

extern bool shift_return_value (enum machine_mode, bool, rtx);

extern rtx expand_call (tree, rtx, int);

extern void fixup_tail_calls (void);

#ifdef TREE_CODE
extern rtx expand_variable_shift (enum tree_code, enum machine_mode,
                                  rtx, tree, rtx, int);
extern rtx expand_shift (enum tree_code, enum machine_mode, rtx, int, rtx,
                         int);
extern rtx expand_divmod (int, enum tree_code, enum machine_mode, rtx, rtx,
                          rtx, int);
#endif

extern void locate_and_pad_parm (enum machine_mode, tree, int, int, tree,
                                 struct args_size *,
                                 struct locate_and_pad_arg_data *);

/* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary.  */
extern rtx label_rtx (tree);

/* As label_rtx, but additionally the label is placed on the forced label
   list of its containing function (i.e. it is treated as reachable even
   if how is not obvious).  */
extern rtx force_label_rtx (tree);

/* Return an rtx like arg but sans any constant terms.
   Returns the original rtx if it has no constant terms.
   The constant terms are added and stored via a second arg.  */
extern rtx eliminate_constant_term (rtx, rtx *);

/* Convert arg to a valid memory address for specified machine mode that points
   to a specific named address space, by emitting insns to perform arithmetic
   if necessary.  */
extern rtx memory_address_addr_space (enum machine_mode, rtx, addr_space_t);

/* Like memory_address_addr_space, except assume the memory address points to
   the generic named address space.  */
#define memory_address(MODE,RTX) \
        memory_address_addr_space ((MODE), (RTX), ADDR_SPACE_GENERIC)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  */
extern rtx change_address (rtx, enum machine_mode, rtx);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */
#define adjust_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   increased by OFFSET bytes from MEMREF.  */
#define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)

extern rtx adjust_address_1 (rtx, enum machine_mode, HOST_WIDE_INT, int, int);
extern rtx adjust_automodify_address_1 (rtx, enum machine_mode, rtx,
                                        HOST_WIDE_INT, int);

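/* Editor's note, an illustrative sketch rather than part of the original
   header (aggregate_mem is a placeholder): given a BLKmode MEM for an
   aggregate, a word-sized piece at byte offset 4 can be referenced as

     rtx word = adjust_address (aggregate_mem, SImode, 4);

   adjust_address validates the new address; the _nv variants are for callers
   that will validate it themselves later.  */
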
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */
extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);

/* Definitions from emit-rtl.c */
#include "emit-rtl.h"

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and adjusted by OFFSET.  */
extern rtx widen_memory_access (rtx, enum machine_mode, HOST_WIDE_INT);

/* Return a memory reference like MEMREF, but which is known to have a
   valid address.  */
extern rtx validize_mem (rtx);

extern rtx use_anchored_address (rtx);

/* Given REF, a MEM, and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */
extern void set_mem_attributes (rtx, tree, int);

/* Similar, except that BITPOS has not yet been applied to REF, so if
   we alter MEM_OFFSET according to T then we should subtract BITPOS
   expecting that it'll be added back in later.  */
extern void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT);

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */
extern int get_mem_align_offset (rtx, unsigned int);

/* Assemble the static constant template for function entry trampolines.  */
extern rtx assemble_trampoline_template (void);

/* Copy given rtx to a new temp reg and return that.  */
extern rtx copy_to_reg (rtx);

/* Like copy_to_reg but always make the reg Pmode.  */
extern rtx copy_addr_to_reg (rtx);

/* Like copy_to_reg but always make the reg the specified mode MODE.  */
extern rtx copy_to_mode_reg (enum machine_mode, rtx);

/* Copy given rtx to given temp reg and return that.  */
extern rtx copy_to_suggested_reg (rtx, rtx, enum machine_mode);

/* Copy a value to a register if it isn't already a register.
   Args are mode (in case value is a constant) and the value.  */
extern rtx force_reg (enum machine_mode, rtx);

/* Return given rtx, copied into a new temp reg if it was in memory.  */
extern rtx force_not_mem (rtx);

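/* Editor's note, an illustrative sketch rather than part of the original
   header: operands that might be constants or MEMs are routinely forced
   into registers before being handed to insn patterns with register_operand
   predicates:

     rtx op = force_reg (SImode, gen_int_mode (42, SImode));

   force_reg may return its argument unchanged if it is already a register,
   whereas copy_to_mode_reg always makes a fresh pseudo.  */
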
/* Return mode and signedness to use when an argument or result in the
   given mode is promoted.  */
extern enum machine_mode promote_function_mode (const_tree, enum machine_mode, int *,
                                                const_tree, int);

/* Return mode and signedness to use when an object in the given mode
   is promoted.  */
extern enum machine_mode promote_mode (const_tree, enum machine_mode, int *);

/* Return mode and signedness to use when object is promoted.  */
enum machine_mode promote_decl_mode (const_tree, int *);

/* Remove some bytes from the stack.  An rtx says how many.  */
extern void adjust_stack (rtx);

/* Add some bytes to the stack.  An rtx says how many.  */
extern void anti_adjust_stack (rtx);

/* Add some bytes to the stack while probing it.  An rtx says how many.  */
extern void anti_adjust_stack_and_probe (rtx, bool);

/* This enum is used for the following two functions.  */
enum save_level {SAVE_BLOCK, SAVE_FUNCTION, SAVE_NONLOCAL};

/* Save the stack pointer at the specified level.  */
extern void emit_stack_save (enum save_level, rtx *);

/* Restore the stack pointer from a save area of the specified level.  */
extern void emit_stack_restore (enum save_level, rtx);

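/* Editor's note, an illustrative sketch rather than part of the original
   header: a save/restore pair bracketing a region that allocates dynamic
   stack space:

     rtx save;
     emit_stack_save (SAVE_BLOCK, &save);
     ... allocate_dynamic_stack_space and uses of the space ...
     emit_stack_restore (SAVE_BLOCK, save);

   SAVE_FUNCTION and SAVE_NONLOCAL select save areas with function-long and
   nonlocal-goto lifetimes instead.  */
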
/* Invoke emit_stack_save for the nonlocal_goto_save_area.  */
extern void update_nonlocal_goto_save_area (void);

/* Allocate some space on the stack dynamically and return its address.  */
extern rtx allocate_dynamic_stack_space (rtx, unsigned, unsigned, bool);

/* Emit one stack probe at ADDRESS, an address within the stack.  */
extern void emit_stack_probe (rtx);

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */
extern void probe_stack_range (HOST_WIDE_INT, rtx);

/* Return an rtx that refers to the value returned by a library call
   in its original home.  This becomes invalid if any more code is emitted.  */
extern rtx hard_libcall_value (enum machine_mode, rtx);

/* Return the mode desired by operand N of a particular bitfield
   insert/extract insn, or MAX_MACHINE_MODE if no such insn is
   available.  */

enum extraction_pattern { EP_insv, EP_extv, EP_extzv };
extern enum machine_mode
mode_for_extraction (enum extraction_pattern, int);

extern void store_bit_field (rtx, unsigned HOST_WIDE_INT,
                             unsigned HOST_WIDE_INT,
                             unsigned HOST_WIDE_INT,
                             unsigned HOST_WIDE_INT,
                             enum machine_mode, rtx);
extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT, int, bool, rtx,
                              enum machine_mode, enum machine_mode);
extern rtx extract_low_bits (enum machine_mode, enum machine_mode, rtx);
extern rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int);
extern rtx expand_mult_highpart_adjust (enum machine_mode, rtx, rtx, rtx, rtx, int);

extern rtx assemble_static_space (unsigned HOST_WIDE_INT);
extern int safe_from_p (const_rtx, tree, int);
extern bool split_comparison (enum rtx_code, enum machine_mode,
                              enum rtx_code *, enum rtx_code *);

/* Call this once to initialize the contents of the optabs
   appropriately for the current target machine.  */
extern void init_optabs (void);
extern void init_all_optabs (void);

/* Call this to initialize an optab function entry.  */
extern rtx init_one_libfunc (const char *);
extern rtx set_user_assembler_libfunc (const char *, const char *);

/* Build a decl for a libfunc named NAME.  */
extern tree build_libfunc_function (const char *);

/* Get the personality libfunc for a function decl.  */
rtx get_personality_function (tree);

#endif /* GCC_EXPR_H */