/* Structure for saving state for a nested function.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_FUNCTION_H
#define GCC_FUNCTION_H

#include "tree.h"
#include "hashtab.h"
#include "vecprim.h"
#include "tm.h"		/* For CUMULATIVE_ARGS.  */
#include "hard-reg-set.h"

/* Stack of pending (incomplete) sequences saved by `start_sequence'.
   Each element describes one pending sequence.
   The main insn-chain is saved in the last element of the chain,
   unless the chain is empty.  */

struct GTY(()) sequence_stack {
  /* First and last insns in the chain of the saved sequence.  */
  rtx first;
  rtx last;
  struct sequence_stack *next;
};
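
/* For illustration, code that emits insns into a nested sequence typically
   looks roughly like the sketch below (DEST and SRC are assumed to be rtx
   values supplied by the caller); each start_sequence pushes one element
   onto this stack and end_sequence pops it:

     rtx seq;
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn (seq);  */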

struct GTY(()) emit_status {
  /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
     After rtl generation, it is 1 plus the largest register number used.  */
  int x_reg_rtx_no;

  /* Lowest label number in current function.  */
  int x_first_label_num;

  /* The ends of the doubly-linked chain of rtl for the current function.
     Both are reset to null at the start of rtl generation for the function.

     start_sequence saves both of these on `sequence_stack' and then starts
     a new, nested sequence of insns.  */
  rtx x_first_insn;
  rtx x_last_insn;

  /* Stack of pending (incomplete) sequences saved by `start_sequence'.
     Each element describes one pending sequence.
     The main insn-chain is saved in the last element of the chain,
     unless the chain is empty.  */
  struct sequence_stack *sequence_stack;

  /* INSN_UID for next insn emitted.
     Reset to 1 for each function compiled.  */
  int x_cur_insn_uid;

  /* INSN_UID for next debug insn emitted.  Only used if
     --param min-nondebug-insn-uid=<value> is given with nonzero value.  */
  int x_cur_debug_insn_uid;

  /* Location of the last line-number NOTE emitted.
     This is used to avoid generating duplicates.  */
  location_t x_last_location;

  /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
     vectors.  Since these vectors are needed during the expansion phase when
     the total number of registers in the function is not yet known, the
     vectors are copied and made bigger when necessary.  */
  int regno_pointer_align_length;

  /* Indexed by pseudo register number, if nonzero gives the known alignment
     for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
     Allocated in parallel with x_regno_reg_rtx.  */
  unsigned char * GTY((skip)) regno_pointer_align;
};


/* Indexed by register number, gives an rtx for that register (and only
   that register).  For pseudo registers, it is the unique rtx for
   that pseudo.  For hard registers, it is an rtx of the mode specified
   by reg_raw_mode.

   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top-level structures.  */

extern GTY ((length ("crtl->emit.x_reg_rtx_no"))) rtx * regno_reg_rtx;

/* For backward compatibility... eventually these should all go away.  */
#define reg_rtx_no (crtl->emit.x_reg_rtx_no)
#define seq_stack (crtl->emit.sequence_stack)

#define REGNO_POINTER_ALIGN(REGNO) (crtl->emit.regno_pointer_align[REGNO])
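
/* For illustration, querying the recorded alignment of a pseudo (as set by
   mark_reg_pointer) might look roughly like this sketch, where PSEUDO_REGNO
   is assumed to be a valid pseudo register number:

     rtx reg = regno_reg_rtx[pseudo_regno];
     if (REG_POINTER (reg))
       use_alignment (REGNO_POINTER_ALIGN (pseudo_regno));

   use_alignment is a placeholder for whatever the caller does with the
   value; it is only meaningful when REG_POINTER is set on the reg.  */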

struct GTY(()) expr_status {
  /* Number of units that we should eventually pop off the stack.
     These are the arguments to function calls that have already returned.  */
  int x_pending_stack_adjust;

  /* Under some ABIs, it is the caller's responsibility to pop arguments
     pushed for function calls.  A naive implementation would simply pop
     the arguments immediately after each call.  However, if several
     function calls are made in a row, it is typically cheaper to pop
     all the arguments after all of the calls are complete since a
     single pop instruction can be used.  Therefore, GCC attempts to
     defer popping the arguments until absolutely necessary.  (For
     example, at the end of a conditional, the arguments must be popped,
     since code outside the conditional won't know whether or not the
     arguments need to be popped.)

     When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
     attempt to defer pops.  Instead, the stack is popped immediately
     after each call.  Rather than setting this variable directly, use
     NO_DEFER_POP and OK_DEFER_POP.  */
  int x_inhibit_defer_pop;

  /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the stack
     boundary can be momentarily unaligned while pushing the arguments.
     Record the delta since the last aligned boundary here in order to get
     stack alignment in the nested function calls working right.  */
  int x_stack_pointer_delta;

  /* Nonzero means __builtin_saveregs has already been done in this function.
     The value is the pseudoreg containing the value __builtin_saveregs
     returned.  */
  rtx x_saveregs_value;

  /* Similarly for __builtin_apply_args.  */
  rtx x_apply_args_value;

  /* List of labels that must never be deleted.  */
  rtx x_forced_labels;
};

typedef struct call_site_record_d *call_site_record;
DEF_VEC_P(call_site_record);
DEF_VEC_ALLOC_P(call_site_record, gc);

/* RTL representation of exception handling.  */
struct GTY(()) rtl_eh {
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  VEC(uchar,gc) *action_record_data;

  VEC(call_site_record,gc) *call_site_record[2];
};

#define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
#define inhibit_defer_pop (crtl->expr.x_inhibit_defer_pop)
#define saveregs_value (crtl->expr.x_saveregs_value)
#define apply_args_value (crtl->expr.x_apply_args_value)
#define forced_labels (crtl->expr.x_forced_labels)
#define stack_pointer_delta (crtl->expr.x_stack_pointer_delta)

struct gimple_df;
struct temp_slot;
typedef struct temp_slot *temp_slot_p;
struct call_site_record_d;
struct dw_fde_struct;

DEF_VEC_P(temp_slot_p);
DEF_VEC_ALLOC_P(temp_slot_p,gc);
struct ipa_opt_pass_d;
typedef struct ipa_opt_pass_d *ipa_opt_pass;

DEF_VEC_P(ipa_opt_pass);
DEF_VEC_ALLOC_P(ipa_opt_pass,heap);

struct GTY(()) varasm_status {
  /* If we're using a per-function constant pool, this is it.  */
  struct rtx_constant_pool *pool;

  /* Number of tree-constants deferred during the expansion of this
     function.  */
  unsigned int deferred_constants;
};

/* Information maintained about the RTL representation of incoming
   arguments.  */
struct GTY(()) incoming_args {
  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  int pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  int size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS info;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;
};

/* Data for function partitioning.  */
struct GTY(()) function_subsections {
  /* Assembly labels for the hot and cold text sections, to
     be used by debugger functions for determining the size of text
     sections.  */

  const char *hot_section_label;
  const char *cold_section_label;
  const char *hot_section_end_label;
  const char *cold_section_end_label;
};

/* Describe an empty area of space in the stack frame.  These can be chained
   into a list; this is used to keep track of space wasted for alignment
   reasons.  */
struct GTY(()) frame_space
{
  struct frame_space *next;

  HOST_WIDE_INT start;
  HOST_WIDE_INT length;
};

/* Data structures maintained for the function currently being processed
   in RTL form.  */
struct GTY(()) rtl_data {
  struct expr_status expr;
  struct emit_status emit;
  struct varasm_status varasm;
  struct incoming_args args;
  struct function_subsections subsections;
  struct rtl_eh eh;

  /* For function.c */

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  int outgoing_args_size;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* Opaque pointer used by get_hard_reg_initial_val and
     has_hard_reg_initial_val (see integrate.[hc]).  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* List (chain of EXPR_LIST) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx x_nonlocal_goto_handler_labels;

  /* Label that will go on function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_naked_return_label;

  /* List (chain of EXPR_LISTs) of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  rtx x_stack_slot_list;

  /* List of empty areas in the stack frame.  */
  struct frame_space *frame_space_list;

  /* Note marking the place in the insn chain where the stack checking
     probe is to be inserted, if we need one.  */
  rtx x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Dynamic Realign Argument Pointer used for realigning the stack.  */
  rtx drap_reg;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  HOST_WIDE_INT x_frame_offset;

  /* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
  rtx x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  VEC(temp_slot_p,gc) *x_used_temp_slots;

  /* List of available temp slots.  */
  struct temp_slot *x_avail_temp_slots;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* The largest alignment needed on the stack, including the requirement
     for outgoing stack alignment.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of the stack frame, used when calling
     other functions.  */
  unsigned int preferred_stack_boundary;

  /* The minimum alignment of the parameter stack.  */
  unsigned int parm_stack_boundary;

  /* The largest alignment of a slot allocated on the stack.  */
  unsigned int max_used_stack_slot_alignment;

  /* The stack alignment estimated before reload, taking the following
     factors into consideration:
     1. Alignment of local stack variables (max_used_stack_slot_alignment)
     2. Alignment requirement for calling other functions
        (preferred_stack_boundary)
     3. Alignment of non-local stack variables that might be spilled to
        the local stack.  */
  unsigned int stack_alignment_estimated;

  /* For reorg.  */

  /* If some insns can be deferred to the delay slots of the epilogue, the
     delay list for them is recorded here.  */
  rtx epilogue_delay_list;

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  bool accesses_prior_frames;

  /* Nonzero if the function calls __builtin_eh_return.  */
  bool calls_eh_return;

  /* Nonzero if function saves all registers, e.g. if it has a nonlocal
     label that can reach the exit block via non-exceptional paths.  */
  bool saves_all_registers;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  bool has_nonlocal_goto;

  /* Nonzero if function being compiled has an asm statement.  */
  bool has_asm_statement;

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  bool all_throwers_are_sibcalls;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  bool limit_stack;

  /* Nonzero if profiling code should be generated.  */
  bool profile;

  /* Nonzero if the current function uses the constant pool.  */
  bool uses_const_pool;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  bool uses_pic_offset_table;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  bool uses_eh_lsda;

  /* Set when the tail call has been produced.  */
  bool tail_call_emit;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  bool arg_pointer_save_area_init;

  /* Nonzero if current function must be given a frame pointer.
     Set in global.c if anything is allocated on the stack there.  */
  bool frame_pointer_needed;

  /* When set, expand should optimize for speed.  */
  bool maybe_hot_insn_p;

  /* Nonzero if function stack realignment is needed.  This flag may be
     set twice: before and after reload.  Before reload it is set based
     on the estimated stack alignment; it may be changed after reload if
     by then the criteria for stack realignment are different.
     The value set after reload is the accurate, final one.  */
  bool stack_realign_needed;

  /* Nonzero if function stack realignment has been tried.  This flag is
     set only once before reload.  It affects register elimination.  This
     is used to generate DWARF debug info for stack variables.  */
  bool stack_realign_tried;

  /* Nonzero if function being compiled needs a dynamic realigned
     argument pointer (drap) if the stack needs realigning.  */
  bool need_drap;

  /* Nonzero if function stack realignment estimation is done, namely
     stack_realign_needed flag has been set before reload wrt estimated
     stack alignment info.  */
  bool stack_realign_processed;

  /* Nonzero if function stack realignment has been finalized, namely
     stack_realign_needed flag has been set and finalized after reload.  */
  bool stack_realign_finalized;

  /* True if dbr_schedule has already been called for this function.  */
  bool dbr_scheduled_p;

  /* True if the current function cannot throw.  Unlike
     TREE_NOTHROW (current_function_decl), it is set even for overwritable
     functions whose currently compiled version is nothrow.  */
  bool nothrow;

  /* True if we performed shrink-wrapping for the current function.  */
  bool shrink_wrapped;

  /* Like regs_ever_live, but 1 if a reg is set or clobbered from an
     asm.  Unlike regs_ever_live, elements of this array corresponding
     to eliminable regs (like the frame pointer) are set if an asm
     sets them.  */
  HARD_REG_SET asm_clobbers;
};

#define return_label (crtl->x_return_label)
#define naked_return_label (crtl->x_naked_return_label)
#define stack_slot_list (crtl->x_stack_slot_list)
#define parm_birth_insn (crtl->x_parm_birth_insn)
#define frame_offset (crtl->x_frame_offset)
#define stack_check_probe_note (crtl->x_stack_check_probe_note)
#define arg_pointer_save_area (crtl->x_arg_pointer_save_area)
#define used_temp_slots (crtl->x_used_temp_slots)
#define avail_temp_slots (crtl->x_avail_temp_slots)
#define temp_slot_level (crtl->x_temp_slot_level)
#define nonlocal_goto_handler_labels (crtl->x_nonlocal_goto_handler_labels)
#define frame_pointer_needed (crtl->frame_pointer_needed)
#define stack_realign_fp (crtl->stack_realign_needed && !crtl->need_drap)
#define stack_realign_drap (crtl->stack_realign_needed && crtl->need_drap)

extern GTY(()) struct rtl_data x_rtl;

/* Accessor to RTL data structures.  They are statically allocated for now
   because we never keep multiple functions in RTL form at once.  For a
   threaded compiler we might want to do this differently.  */
#define crtl (&x_rtl)

struct GTY(()) stack_usage
{
  /* # of bytes of static stack space allocated by the function.  */
  HOST_WIDE_INT static_stack_size;

  /* # of bytes of dynamic stack space allocated by the function.  This is
     meaningful only if has_unbounded_dynamic_stack_size is zero.  */
  HOST_WIDE_INT dynamic_stack_size;

  /* # of bytes of space pushed onto the stack after the prologue.  If
     !ACCUMULATE_OUTGOING_ARGS, it contains the outgoing arguments.  */
  int pushed_stack_size;

  /* Nonzero if the amount of stack space allocated dynamically cannot
     be bounded at compile-time.  */
  unsigned int has_unbounded_dynamic_stack_size : 1;
};

#define current_function_static_stack_size (cfun->su->static_stack_size)
#define current_function_dynamic_stack_size (cfun->su->dynamic_stack_size)
#define current_function_pushed_stack_size (cfun->su->pushed_stack_size)
#define current_function_has_unbounded_dynamic_stack_size \
  (cfun->su->has_unbounded_dynamic_stack_size)
#define current_function_allocates_dynamic_stack_space    \
  (current_function_dynamic_stack_size != 0               \
   || current_function_has_unbounded_dynamic_stack_size)
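
/* For illustration, a diagnostic built on the accessors above might look
   roughly like this sketch, where STACK_LIMIT is a hypothetical threshold
   chosen by the caller:

     if (current_function_static_stack_size > stack_limit)
       warning (0, "function uses %wd bytes of static stack",
		current_function_static_stack_size);  */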

/* This structure can save all the important global and static variables
   describing the status of the current function.  */

struct GTY(()) function {
  struct eh_status *eh;

  /* The control flow graph for this function.  */
  struct control_flow_graph *cfg;

  /* GIMPLE body for this function.  */
  struct gimple_seq_d *gimple_body;

  /* SSA and dataflow information.  */
  struct gimple_df *gimple_df;

  /* The loops in this function.  */
  struct loops *x_current_loops;

  /* The stack usage of this function.  */
  struct stack_usage *su;

  /* Value histograms attached to particular statements.  */
  htab_t GTY((skip)) value_histograms;

  /* For function.c.  */

  /* Points to the FUNCTION_DECL of this function.  */
  tree decl;

  /* A PARM_DECL that should contain the static chain for this function.
     It will be initialized at the beginning of the function.  */
  tree static_chain_decl;

  /* An expression that contains the non-local goto save area.  The first
     word is the saved frame pointer and the second is the saved stack
     pointer.  */
  tree nonlocal_goto_save_area;

  /* Vector of function local variables, functions, types and constants.  */
  VEC(tree,gc) *local_decls;

  /* For md files.  */

  /* tm.h can use this to store whatever it likes.  */
  struct machine_function * GTY ((maybe_undef)) machine;

  /* Language-specific code can use this to store whatever it likes.  */
  struct language_function * language;

  /* Used types hash table.  */
  htab_t GTY ((param_is (union tree_node))) used_types_hash;

  /* Dwarf2 Frame Description Entry, containing the Call Frame Instructions
     used for unwinding.  Only set when either dwarf2 unwinding or dwarf2
     debugging is enabled.  */
  struct dw_fde_struct *fde;

  /* Last statement uid.  */
  int last_stmt_uid;

  /* Function sequence number for profiling, debugging, etc.  */
  int funcdef_no;

  /* Line number of the start of the function for debugging purposes.  */
  location_t function_start_locus;

  /* Line number of the end of the function.  */
  location_t function_end_locus;

  /* Properties used by the pass manager.  */
  unsigned int curr_properties;
  unsigned int last_verified;

  /* Non-null if the function does something that would prevent it from
     being copied; this applies to both versioning and inlining.  Set to
     a string describing the reason for failure.  */
  const char * GTY((skip)) cannot_be_copied_reason;

  /* Collected bit flags.  */

  /* Number of units of general registers that need saving in a stdarg
     function.  What a unit is depends on the back end: it may be a number
     of bytes or a number of registers.  */
  unsigned int va_list_gpr_size : 8;

  /* Number of units of floating point registers that need saving in a
     stdarg function.  */
  unsigned int va_list_fpr_size : 8;

  /* Nonzero if function being compiled can call setjmp.  */
  unsigned int calls_setjmp : 1;

  /* Nonzero if function being compiled can call alloca,
     either as a subroutine or builtin.  */
  unsigned int calls_alloca : 1;

  /* Nonzero if function being compiled receives nonlocal gotos
     from nested functions.  */
  unsigned int has_nonlocal_label : 1;

  /* Nonzero if we've set cannot_be_copied_reason.  I.e. if
     (cannot_be_copied_set && !cannot_be_copied_reason), the function
     can in fact be copied.  */
  unsigned int cannot_be_copied_set : 1;

  /* Nonzero if current function uses stdarg.h or equivalent.  */
  unsigned int stdarg : 1;

  unsigned int after_inlining : 1;
  unsigned int always_inline_functions_inlined : 1;

  /* Nonzero if function being compiled can throw synchronous non-call
     exceptions.  */
  unsigned int can_throw_non_call_exceptions : 1;

  /* Fields below this point are not set for abstract functions; see
     allocate_struct_function.  */

  /* Nonzero if function being compiled needs to be given an address
     where the value should be stored.  */
  unsigned int returns_struct : 1;

  /* Nonzero if function being compiled needs to
     return the address of where it has put a structure value.  */
  unsigned int returns_pcc_struct : 1;

  /* Nonzero if pass_tree_profile was run on this function.  */
  unsigned int after_tree_profile : 1;

  /* Nonzero if this function has local DECL_HARD_REGISTER variables.
     In this case code motion has to be done more carefully.  */
  unsigned int has_local_explicit_reg_vars : 1;

  /* Nonzero if the current function is a thunk, i.e., a lightweight
     function (implemented by the output_mi_thunk hook) that just
     adjusts one of its arguments and forwards to another
     function.  */
  unsigned int is_thunk : 1;
};

/* Add the decl D to the local_decls list of FUN.  */

static inline void
add_local_decl (struct function *fun, tree d)
{
  VEC_safe_push (tree, gc, fun->local_decls, d);
}

#define FOR_EACH_LOCAL_DECL(FUN, I, D)		\
  FOR_EACH_VEC_ELT_REVERSE (tree, (FUN)->local_decls, I, D)
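
/* For illustration, walking and extending the local_decls list looks
   roughly like the sketch below; FUN, NEW_VAR and handle_decl are
   placeholders supplied by the caller:

     unsigned ix;
     tree decl;
     FOR_EACH_LOCAL_DECL (fun, ix, decl)
       if (TREE_CODE (decl) == VAR_DECL)
	 handle_decl (decl);
     add_local_decl (fun, new_var);  */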

/* If va_list_[gf]pr_size is set to this, it means we don't know how
   many units need to be saved.  */
#define VA_LIST_MAX_GPR_SIZE	255
#define VA_LIST_MAX_FPR_SIZE	255

/* The function currently being compiled.  */
extern GTY(()) struct function *cfun;

/* In order to ensure that cfun is not set directly, we redefine it so
   that it is not an lvalue.  Rather than assign to cfun, use
   push_cfun or set_cfun.  */
#define cfun (cfun + 0)

/* Nonzero if we've already converted virtual regs to hard regs.  */
extern int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
extern int trampolines_created;

struct GTY(()) types_used_by_vars_entry {
  tree type;
  tree var_decl;
};

/* Hash table recording the relationship between a global variable
   and the types it references in its initializer.  The key of the
   entry is a referenced type, and the value is the DECL of the global
   variable.  types_used_by_vars_do_hash and types_used_by_vars_eq below
   are the hash and equality functions to use for this hash table.  */
extern GTY((param_is (struct types_used_by_vars_entry))) htab_t
  types_used_by_vars_hash;

hashval_t types_used_by_vars_do_hash (const void*);
int types_used_by_vars_eq (const void *, const void *);
void types_used_by_var_decl_insert (tree type, tree var_decl);

/* During parsing of a global variable, this vector contains the types
   referenced by the global variable.  */
extern GTY(()) VEC(tree,gc) *types_used_by_cur_var_decl;


/* cfun shouldn't be set directly; use one of these functions instead.  */
extern void set_cfun (struct function *new_cfun);
extern void push_cfun (struct function *new_cfun);
extern void pop_cfun (void);
extern void instantiate_decl_rtl (rtx x);
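
/* For illustration, a pass that temporarily needs to operate on another
   function usually brackets the work with push_cfun/pop_cfun, roughly as
   in this sketch (OTHER_FNDECL is assumed to be a FUNCTION_DECL whose
   struct function has been allocated):

     push_cfun (DECL_STRUCT_FUNCTION (other_fndecl));
     do_something_with (cfun);
     pop_cfun ();

   do_something_with is a placeholder for the caller's own processing.  */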

/* For backward compatibility... eventually these should all go away.  */
#define current_function_funcdef_no (cfun->funcdef_no)

#define current_loops (cfun->x_current_loops)
#define dom_computed (cfun->cfg->x_dom_computed)
#define n_bbs_in_dom_tree (cfun->cfg->x_n_bbs_in_dom_tree)
#define VALUE_HISTOGRAMS(fun) (fun)->value_histograms

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
extern void reorder_blocks (void);

/* Set BLOCK_NUMBER for all the blocks in FN.  */
extern void number_blocks (tree);

extern void clear_block_marks (tree);
extern tree blocks_nreverse (tree);
extern tree block_chainon (tree, tree);

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */
extern HOST_WIDE_INT get_frame_size (void);

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */
extern bool frame_offset_overflow (HOST_WIDE_INT, tree);

/* A pointer to a function to create target specific, per-function
   data structures.  */
extern struct machine_function * (*init_machine_status) (void);

/* Save and restore status information for a nested function.  */
extern void free_after_parsing (struct function *);
extern void free_after_compilation (struct function *);

extern void init_varasm_status (void);

#ifdef RTX_CODE
extern void diddle_return_value (void (*)(rtx, void*), void*);
extern void clobber_return_register (void);
#endif

extern rtx get_arg_pointer_save_area (void);

/* Returns the name of the current function.  */
extern const char *current_function_name (void);

extern void do_warn_unused_parameter (tree);

extern bool pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
			       tree, bool);
extern bool reference_callee_copied (CUMULATIVE_ARGS *, enum machine_mode,
				     tree, bool);

extern void used_types_insert (tree);

extern int get_next_funcdef_no (void);
extern int get_last_funcdef_no (void);

#ifdef HAVE_simple_return
extern bool requires_stack_frame_p (rtx, HARD_REG_SET, HARD_REG_SET);
#endif

/* In predict.c */
extern bool optimize_function_for_size_p (struct function *);
extern bool optimize_function_for_speed_p (struct function *);

#endif  /* GCC_FUNCTION_H */