/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EXPR_H
#define GCC_EXPR_H

/* For inhibit_defer_pop */
#include "function.h"
/* For XEXP, GEN_INT, rtx_code */
#include "rtl.h"
/* For optimize_size */
#include "flags.h"
/* For host_integerp, tree_low_cst, fold_convert, size_binop, ssize_int,
   TREE_CODE, TYPE_SIZE, int_size_in_bytes.  */
#include "tree.h"
/* For GET_MODE_BITSIZE, word_mode */
#include "machmode.h"

/* The default branch cost is 1.  */
#ifndef BRANCH_COST
#define BRANCH_COST 1
#endif

/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also records any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};

/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)
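
/* Illustrative pairing (a sketch, not part of this interface): the two
   macros adjust the same counter, so they are used in matched pairs around
   code that must not have stack pops deferred, e.g.

     NO_DEFER_POP;
     ... emit insns that rely on an exact stack pointer ...
     OK_DEFER_POP;  */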

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movmem or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a setmem or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_setmemqi) || defined (HAVE_setmemhi) || defined (HAVE_setmemsi) || defined (HAVE_setmemdi) || defined (HAVE_setmemti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

enum direction {none, upward, downward};

/* Structure to record the size of a sequence of arguments
   as the sum of a tree-expression and a constant.  This structure is
   also used to store offsets from the stack, which might be negative,
   so the variable part must be ssizetype, not sizetype.  */

struct args_size
{
  HOST_WIDE_INT constant;
  tree var;
};

/* Package up various arg related fields of struct args for
   locate_and_pad_parm.  */
struct locate_and_pad_arg_data
{
  /* Size of this argument on the stack, rounded up for any padding it
     gets.  If REG_PARM_STACK_SPACE is defined, then register parms are
     counted here, otherwise they aren't.  */
  struct args_size size;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Offset to the start of the stack slot.  Different from OFFSET
     if this arg pads downward.  */
  struct args_size slot_offset;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
  /* Which way we should pad this arg.  */
  enum direction where_pad;
  /* slot_offset is at least this aligned.  */
  unsigned int boundary;
};

/* Add the value of the tree INC to the `struct args_size' TO.  */

#define ADD_PARM_SIZE(TO, INC)                                  \
do {                                                            \
  tree inc = (INC);                                             \
  if (host_integerp (inc, 0))                                   \
    (TO).constant += tree_low_cst (inc, 0);                     \
  else if ((TO).var == 0)                                       \
    (TO).var = fold_convert (ssizetype, inc);                   \
  else                                                          \
    (TO).var = size_binop (PLUS_EXPR, (TO).var,                 \
                           fold_convert (ssizetype, inc));      \
} while (0)

#define SUB_PARM_SIZE(TO, DEC)                                  \
do {                                                            \
  tree dec = (DEC);                                             \
  if (host_integerp (dec, 0))                                   \
    (TO).constant -= tree_low_cst (dec, 0);                     \
  else if ((TO).var == 0)                                       \
    (TO).var = size_binop (MINUS_EXPR, ssize_int (0),           \
                           fold_convert (ssizetype, dec));      \
  else                                                          \
    (TO).var = size_binop (MINUS_EXPR, (TO).var,                \
                           fold_convert (ssizetype, dec));      \
} while (0)

/* Convert the implicit sum in a `struct args_size' into a tree
   of type ssizetype.  */
#define ARGS_SIZE_TREE(SIZE)                                            \
((SIZE).var == 0 ? ssize_int ((SIZE).constant)                          \
 : size_binop (PLUS_EXPR, fold_convert (ssizetype, (SIZE).var),         \
               ssize_int ((SIZE).constant)))

/* Convert the implicit sum in a `struct args_size' into an rtx.  */
#define ARGS_SIZE_RTX(SIZE)                                     \
((SIZE).var == 0 ? GEN_INT ((SIZE).constant)                    \
 : expand_normal (ARGS_SIZE_TREE (SIZE)))
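
/* Illustrative use of the args_size accessors (hypothetical locals, not part
   of this interface): accumulate a parameter's size and then materialize it
   as an rtx:

     struct args_size sz;
     sz.constant = 0, sz.var = 0;
     ADD_PARM_SIZE (sz, TYPE_SIZE_UNIT (type));
     size_rtx = ARGS_SIZE_RTX (sz);  */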

/* Supply a default definition for FUNCTION_ARG_PADDING:
   usually pad upward, but pad short args downward on
   big-endian machines.  */

#define DEFAULT_FUNCTION_ARG_PADDING(MODE, TYPE)                        \
  (! BYTES_BIG_ENDIAN                                                   \
   ? upward                                                             \
   : (((MODE) == BLKmode                                                \
       ? ((TYPE) && TREE_CODE (TYPE_SIZE (TYPE)) == INTEGER_CST         \
          && int_size_in_bytes (TYPE) < (PARM_BOUNDARY / BITS_PER_UNIT)) \
       : GET_MODE_BITSIZE (MODE) < PARM_BOUNDARY)                       \
      ? downward : upward))

#ifndef FUNCTION_ARG_PADDING
#define FUNCTION_ARG_PADDING(MODE, TYPE)        \
  DEFAULT_FUNCTION_ARG_PADDING ((MODE), (TYPE))
#endif

/* Supply a default definition for FUNCTION_ARG_BOUNDARY.  Normally, we let
   FUNCTION_ARG_PADDING, which also pads the length, handle any needed
   alignment.  */

#ifndef FUNCTION_ARG_BOUNDARY
#define FUNCTION_ARG_BOUNDARY(MODE, TYPE)       PARM_BOUNDARY
#endif

/* Supply a default definition of STACK_SAVEAREA_MODE for emit_stack_save.
   Normally move_insn, so Pmode stack pointer.  */

#ifndef STACK_SAVEAREA_MODE
#define STACK_SAVEAREA_MODE(LEVEL) Pmode
#endif

/* Supply a default definition of STACK_SIZE_MODE for
   allocate_dynamic_stack_space.  Normally PLUS/MINUS, so word_mode.  */

#ifndef STACK_SIZE_MODE
#define STACK_SIZE_MODE word_mode
#endif

/* Provide default values for the macros controlling stack checking.  */

#ifndef STACK_CHECK_BUILTIN
#define STACK_CHECK_BUILTIN 0
#endif

/* The default interval is one page.  */
#ifndef STACK_CHECK_PROBE_INTERVAL
#define STACK_CHECK_PROBE_INTERVAL 4096
#endif

/* The default is to do a store into the stack.  */
#ifndef STACK_CHECK_PROBE_LOAD
#define STACK_CHECK_PROBE_LOAD 0
#endif

/* This value is arbitrary, but should be sufficient for most machines.  */
#ifndef STACK_CHECK_PROTECT
#define STACK_CHECK_PROTECT (75 * UNITS_PER_WORD)
#endif

/* Make the maximum frame size be the largest we can and still only need
   one probe per function.  */
#ifndef STACK_CHECK_MAX_FRAME_SIZE
#define STACK_CHECK_MAX_FRAME_SIZE \
  (STACK_CHECK_PROBE_INTERVAL - UNITS_PER_WORD)
#endif

/* This is arbitrary, but should be large enough everywhere.  */
#ifndef STACK_CHECK_FIXED_FRAME_SIZE
#define STACK_CHECK_FIXED_FRAME_SIZE (4 * UNITS_PER_WORD)
#endif

/* Provide a reasonable default for the maximum size of an object to
   allocate in the fixed frame.  We may need to be able to make this
   controllable by the user at some point.  */
#ifndef STACK_CHECK_MAX_VAR_SIZE
#define STACK_CHECK_MAX_VAR_SIZE (STACK_CHECK_MAX_FRAME_SIZE / 100)
#endif

/* Functions from optabs.c, commonly used, and without need for the optabs
   tables:  */

/* Passed to expand_simple_binop and expand_binop to say which options
   to try to use if the requested operation can't be open-coded on the
   requisite mode.  Either OPTAB_LIB or OPTAB_LIB_WIDEN says try using
   a library call.  Either OPTAB_WIDEN or OPTAB_LIB_WIDEN says try
   using a wider mode.  OPTAB_MUST_WIDEN says try widening and don't
   try anything else.  */

enum optab_methods
{
  OPTAB_DIRECT,
  OPTAB_LIB,
  OPTAB_WIDEN,
  OPTAB_LIB_WIDEN,
  OPTAB_MUST_WIDEN
};

/* Generate code for a simple binary or unary operation.  "Simple" in
   this case means "can be unambiguously described by a (mode, code)
   pair and mapped to a single optab."  */
extern rtx expand_simple_binop (enum machine_mode, enum rtx_code, rtx,
                                rtx, rtx, int, enum optab_methods);
extern rtx expand_simple_unop (enum machine_mode, enum rtx_code, rtx, rtx,
                               int);
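
/* Illustrative call (assumed SImode pseudos X and Y): compute X + Y into a
   fresh pseudo, falling back to a wider mode or a libcall if SImode addition
   cannot be open-coded:

     rtx sum = expand_simple_binop (SImode, PLUS, x, y,
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN);  */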

/* Report whether the machine description contains an insn which can
   perform the operation described by CODE and MODE.  */
extern int have_insn_for (enum rtx_code, enum machine_mode);

/* Emit code to make a call to a constant function or a library call.  */
extern void emit_libcall_block (rtx, rtx, rtx, rtx);

/* Create but don't emit one rtl instruction to perform certain operations.
   Modes must match; operands must meet the operation's predicates.
   Likewise for subtraction and for just copying.  */
extern rtx gen_add2_insn (rtx, rtx);
extern rtx gen_add3_insn (rtx, rtx, rtx);
extern rtx gen_sub2_insn (rtx, rtx);
extern rtx gen_sub3_insn (rtx, rtx, rtx);
extern rtx gen_move_insn (rtx, rtx);
extern int have_add2_insn (rtx, rtx);
extern int have_sub2_insn (rtx, rtx);

/* Emit a pair of rtl insns to compare two rtx's and to jump
   to a label if the comparison is true.  */
extern void emit_cmp_and_jump_insns (rtx, rtx, enum rtx_code, rtx,
                                     enum machine_mode, int, rtx);
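
/* Illustrative call (assumed SImode pseudos X and Y and a code label LABEL):
   branch to LABEL when X == Y, treating the operands as signed:

     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, SImode, 0, label);  */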

/* Generate code to indirectly jump to a location given in the rtx LOC.  */
extern void emit_indirect_jump (rtx);

/* Generate a conditional trap instruction.  */
extern rtx gen_cond_trap (enum rtx_code, rtx, rtx, rtx);

#include "insn-config.h"

#ifdef HAVE_conditional_move
/* Emit a conditional move operation.  */
rtx emit_conditional_move (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
                           rtx, rtx, enum machine_mode, int);

/* Return nonzero if the conditional move is supported.  */
int can_conditionally_move_p (enum machine_mode mode);

#endif
rtx emit_conditional_add (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
                          rtx, rtx, enum machine_mode, int);

rtx expand_val_compare_and_swap (rtx, rtx, rtx, rtx);
rtx expand_bool_compare_and_swap (rtx, rtx, rtx, rtx);
rtx expand_sync_operation (rtx, rtx, enum rtx_code);
rtx expand_sync_fetch_operation (rtx, rtx, enum rtx_code, bool, rtx);
rtx expand_sync_lock_test_and_set (rtx, rtx, rtx);

/* Functions from expmed.c:  */

/* Arguments MODE, RTX: return an rtx for the negation of that value.
   May emit insns.  */
extern rtx negate_rtx (enum machine_mode, rtx);

/* Expand a logical AND operation.  */
extern rtx expand_and (enum machine_mode, rtx, rtx, rtx);

/* Emit a store-flag operation.  */
extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, enum machine_mode,
                            int, int);

/* Like emit_store_flag, but always succeeds.  */
extern rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx,
                                  enum machine_mode, int, int);
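
/* Illustrative call (assumed SImode pseudos X, Y and target TGT): set TGT to
   1 when X < Y as signed values and to 0 otherwise; unlike
   emit_store_flag_force, emit_store_flag may return 0 when it cannot emit
   the operation:

     rtx flag = emit_store_flag (tgt, LT, x, y, SImode, 0, 1);  */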

/* Functions from builtins.c:  */
extern rtx expand_builtin (tree, rtx, rtx, enum machine_mode, int);
extern tree std_build_builtin_va_list (void);
extern void std_expand_builtin_va_start (tree, rtx);
extern rtx default_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
extern void expand_builtin_setjmp_setup (rtx, rtx);
extern void expand_builtin_setjmp_receiver (rtx);
extern rtx expand_builtin_saveregs (void);
extern void expand_builtin_trap (void);

/* Functions from expr.c:  */

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */
extern void init_expr_once (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (enum machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (enum machine_mode, enum machine_mode, rtx, int);
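
/* Illustrative call (assumed QImode rtx VAL): widen VAL to SImode with zero
   extension; the final argument is the signedness of the input, so nonzero
   requests an unsigned (zero) extension:

     rtx wide = convert_to_mode (SImode, val, 1);  */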

/* Emit code to move a block Y to a block X.  */

enum block_op_methods
{
  BLOCK_OP_NORMAL,
  BLOCK_OP_NO_LIBCALL,
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL
};

extern void init_block_move_fn (const char *);
extern void init_block_clear_fn (const char *);

extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
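
/* Illustrative call (assumed BLKmode MEMs DST and SRC and a byte count
   NBYTES): copy the block, letting the expander choose between inline
   moves, a movmem pattern, or a library call:

     emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);  */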

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, enum machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, int);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, int);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, int);

/* Copy BLKmode object from a set of registers.  */
extern rtx copy_blkmode_from_reg (rtx, rtx, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.  */
extern void use_reg (rtx *, rtx);

/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int);

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */
extern int can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

/* Return nonzero if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  */
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
                                rtx (*) (void *, HOST_WIDE_INT,
                                         enum machine_mode),
                                void *, unsigned int);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   Returns TO + LEN.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
                            rtx (*) (void *, HOST_WIDE_INT, enum machine_mode),
                            void *, unsigned int, int);

/* Emit insns to set X from Y.  */
extern rtx emit_move_insn (rtx, rtx);

/* Emit insns to set X from Y, with no frills.  */
extern rtx emit_move_insn_1 (rtx, rtx);

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, int, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern void emit_push_insn (rtx, enum machine_mode, tree, rtx, unsigned int,
                            int, rtx, int, rtx, rtx, int, rtx);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Work horse for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, enum machine_mode,
                             enum expand_modifier, rtx *);

/* Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.  */
static inline rtx
expand_expr (tree exp, rtx target, enum machine_mode mode,
             enum expand_modifier modifier)
{
  return expand_expr_real (exp, target, mode, modifier, NULL);
}

static inline rtx
expand_normal (tree exp)
{
  return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL);
}
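
/* Illustrative use: expand a tree expression EXP into RTL at the current
   insertion point, letting the expander choose target and mode:

     rtx val = expand_normal (exp);

   which, per the inline definitions above, is shorthand for
   expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL).  */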

extern void expand_var (tree);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */
extern void init_pending_stack_adjust (void);

/* Discard any pending stack adjustment.  */
extern void discard_pending_stack_adjust (void);

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */
extern void clear_pending_stack_adjust (void);

/* Pop any previously-pushed arguments that have not been popped yet.  */
extern void do_pending_stack_adjust (void);

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */
extern tree string_constant (tree, tree *);

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.  */
extern void jumpifnot (tree, rtx);

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
extern void jumpif (tree, rtx);

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.  */
extern void do_jump (tree, rtx, rtx);

/* Generate rtl to compare two rtx's, will call emit_cmp_insn.  */
extern rtx compare_from_rtx (rtx, rtx, enum rtx_code, int, enum machine_mode,
                             rtx);
extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
                                     enum machine_mode, rtx, rtx, rtx);

/* Two different ways of generating switch statements.  */
extern int try_casesi (tree, tree, tree, tree, rtx, rtx);
extern int try_tablejump (tree, tree, tree, tree, rtx, rtx);

/* Smallest number of adjacent cases before we use a jump table.
   XXX Should be a target hook.  */
extern unsigned int case_values_threshold (void);

/* Functions from alias.c */
#include "alias.h"


/* rtl.h and tree.h were included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */
extern HOST_WIDE_INT int_expr_size (tree);

/* Return an rtx that refers to the value returned by a function
   in its original home.  This becomes invalid if any more code is emitted.  */
extern rtx hard_function_value (tree, tree, tree, int);

extern rtx prepare_call_address (rtx, rtx, rtx *, int, int);

extern bool shift_return_value (enum machine_mode, bool, rtx);

extern rtx expand_call (tree, rtx, int);

extern void fixup_tail_calls (void);

#ifdef TREE_CODE
extern rtx expand_shift (enum tree_code, enum machine_mode, rtx, tree, rtx,
                         int);
extern rtx expand_divmod (int, enum tree_code, enum machine_mode, rtx, rtx,
                          rtx, int);
#endif

extern void locate_and_pad_parm (enum machine_mode, tree, int, int, tree,
                                 struct args_size *,
                                 struct locate_and_pad_arg_data *);

/* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary.  */
extern rtx label_rtx (tree);

/* As label_rtx, but additionally the label is placed on the forced label
   list of its containing function (i.e. it is treated as reachable even
   if how is not obvious).  */
extern rtx force_label_rtx (tree);

/* Indicate how an input argument register was promoted.  */
extern rtx promoted_input_arg (unsigned int, enum machine_mode *, int *);

/* Return an rtx like arg but sans any constant terms.
   Returns the original rtx if it has no constant terms.
   The constant terms are added and stored via a second arg.  */
extern rtx eliminate_constant_term (rtx, rtx *);

/* Convert arg to a valid memory address for specified machine mode,
   by emitting insns to perform arithmetic if necessary.  */
extern rtx memory_address (enum machine_mode, rtx);

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */
extern rtx memory_address_noforce (enum machine_mode, rtx);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR.
   (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  */
extern rtx change_address (rtx, enum machine_mode, rtx);

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  */
#define adjust_address(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_address_nv(MEMREF, MODE, OFFSET) \
  adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1)

/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   increased by OFFSET bytes from MEMREF.  */
#define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)

/* Likewise, but the reference is not required to be valid.  */
#define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
  adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)

extern rtx adjust_address_1 (rtx, enum machine_mode, HOST_WIDE_INT, int, int);
extern rtx adjust_automodify_address_1 (rtx, enum machine_mode, rtx,
                                        HOST_WIDE_INT, int);
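
/* Illustrative use (assumed DImode MEM `mem`): form a validated SImode
   reference to the piece of the same object at byte offset 4:

     rtx part = adjust_address (mem, SImode, 4);  */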

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */
extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);

/* Definitions from emit-rtl.c */
#include "emit-rtl.h"

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and adjusted by OFFSET.  */
extern rtx widen_memory_access (rtx, enum machine_mode, HOST_WIDE_INT);

/* Return a memory reference like MEMREF, but which is known to have a
   valid address.  */
extern rtx validize_mem (rtx);

extern rtx use_anchored_address (rtx);

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */
extern void set_mem_attributes (rtx, tree, int);

/* Similar, except that BITPOS has not yet been applied to REF, so if
   we alter MEM_OFFSET according to T then we should subtract BITPOS
   expecting that it'll be added back in later.  */
extern void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT);

/* Assemble the static constant template for function entry trampolines.  */
extern rtx assemble_trampoline_template (void);

/* Copy given rtx to a new temp reg and return that.  */
extern rtx copy_to_reg (rtx);

/* Like copy_to_reg but always make the reg Pmode.  */
extern rtx copy_addr_to_reg (rtx);

/* Like copy_to_reg but always make the reg the specified mode MODE.  */
extern rtx copy_to_mode_reg (enum machine_mode, rtx);

/* Copy given rtx to given temp reg and return that.  */
extern rtx copy_to_suggested_reg (rtx, rtx, enum machine_mode);

/* Copy a value to a register if it isn't already a register.
   Args are mode (in case value is a constant) and the value.  */
extern rtx force_reg (enum machine_mode, rtx);

/* Return given rtx, copied into a new temp reg if it was in memory.  */
extern rtx force_not_mem (rtx);

/* Return mode and signedness to use when object is promoted.  */
extern enum machine_mode promote_mode (tree, enum machine_mode, int *, int);

/* Remove some bytes from the stack.  An rtx says how many.  */
extern void adjust_stack (rtx);

/* Add some bytes to the stack.  An rtx says how many.  */
extern void anti_adjust_stack (rtx);

/* This enum is used for the following two functions.  */
enum save_level {SAVE_BLOCK, SAVE_FUNCTION, SAVE_NONLOCAL};

/* Save the stack pointer at the specified level.  */
extern void emit_stack_save (enum save_level, rtx *, rtx);

/* Restore the stack pointer from a save area of the specified level.  */
extern void emit_stack_restore (enum save_level, rtx, rtx);
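
/* Illustrative pairing (a sketch; assumes emit_stack_save allocates the save
   area when the rtx it is handed starts out null): save the stack pointer
   before a region that adjusts it and restore it afterwards:

     rtx sa = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &sa, NULL_RTX);
     ...
     emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);  */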

/* Invoke emit_stack_save for the nonlocal_goto_save_area.  */
extern void update_nonlocal_goto_save_area (void);

/* Allocate some space on the stack dynamically and return its address.  An rtx
   says how many bytes.  */
extern rtx allocate_dynamic_stack_space (rtx, rtx, int);

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */
extern void probe_stack_range (HOST_WIDE_INT, rtx);

/* Return an rtx that refers to the value returned by a library call
   in its original home.  This becomes invalid if any more code is emitted.  */
extern rtx hard_libcall_value (enum machine_mode);

/* Return the mode desired by operand N of a particular bitfield
   insert/extract insn, or MAX_MACHINE_MODE if no such insn is
   available.  */

enum extraction_pattern { EP_insv, EP_extv, EP_extzv };
extern enum machine_mode
mode_for_extraction (enum extraction_pattern, int);

extern rtx store_bit_field (rtx, unsigned HOST_WIDE_INT,
                            unsigned HOST_WIDE_INT, enum machine_mode, rtx);
extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT, int, rtx,
                              enum machine_mode, enum machine_mode);
extern rtx expand_mult (enum machine_mode, rtx, rtx, rtx, int);
extern rtx expand_mult_highpart_adjust (enum machine_mode, rtx, rtx, rtx, rtx, int);

extern rtx assemble_static_space (unsigned HOST_WIDE_INT);
extern int safe_from_p (rtx, tree, int);

/* Call this once to initialize the contents of the optabs
   appropriately for the current target machine.  */
extern void init_optabs (void);
extern void init_all_optabs (void);

/* Call this to initialize an optab function entry.  */
extern rtx init_one_libfunc (const char *);

extern int vector_mode_valid_p (enum machine_mode);

#endif /* GCC_EXPR_H */