/* Define control and data flow tables, and regsets.
   Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_BASIC_BLOCK_H
#define GCC_BASIC_BLOCK_H

#include "bitmap.h"
#include "sbitmap.h"
#include "varray.h"
#include "partition.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "vec.h"
#include "function.h"

/* Head of register set linked list.  */
typedef bitmap_head regset_head;

/* A pointer to a regset_head.  */
typedef bitmap regset;

/* Allocate a register set with oballoc.  */
#define ALLOC_REG_SET(OBSTACK) BITMAP_ALLOC (OBSTACK)

/* Do any cleanup needed on a regset when it is no longer used.  */
#define FREE_REG_SET(REGSET) BITMAP_FREE (REGSET)

/* Initialize a new regset.  */
#define INIT_REG_SET(HEAD) bitmap_initialize (HEAD, &reg_obstack)

/* Clear a register set by freeing up the linked list.  */
#define CLEAR_REG_SET(HEAD) bitmap_clear (HEAD)

/* Copy a register set to another register set.  */
#define COPY_REG_SET(TO, FROM) bitmap_copy (TO, FROM)

/* Compare two register sets.  */
#define REG_SET_EQUAL_P(A, B) bitmap_equal_p (A, B)

/* `and' a register set with a second register set.  */
#define AND_REG_SET(TO, FROM) bitmap_and_into (TO, FROM)

/* `and' the complement of a register set with a register set.  */
#define AND_COMPL_REG_SET(TO, FROM) bitmap_and_compl_into (TO, FROM)

/* Inclusive or a register set with a second register set.  */
#define IOR_REG_SET(TO, FROM) bitmap_ior_into (TO, FROM)

/* Exclusive or a register set with a second register set.  */
#define XOR_REG_SET(TO, FROM) bitmap_xor_into (TO, FROM)

/* Or into TO the register set FROM1 `and'ed with the complement of FROM2.  */
#define IOR_AND_COMPL_REG_SET(TO, FROM1, FROM2) \
  bitmap_ior_and_compl_into (TO, FROM1, FROM2)

/* Clear a single register in a register set.  */
#define CLEAR_REGNO_REG_SET(HEAD, REG) bitmap_clear_bit (HEAD, REG)

/* Set a single register in a register set.  */
#define SET_REGNO_REG_SET(HEAD, REG) bitmap_set_bit (HEAD, REG)

/* Return true if a register is set in a register set.  */
#define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)

/* Copy the hard registers in a register set to the hard register set.  */
extern void reg_set_to_hard_reg_set (HARD_REG_SET *, bitmap);
#define REG_SET_TO_HARD_REG_SET(TO, FROM) \
do { \
  CLEAR_HARD_REG_SET (TO); \
  reg_set_to_hard_reg_set (&TO, FROM); \
} while (0)

typedef bitmap_iterator reg_set_iterator;

/* Loop over all registers in REGSET, starting with MIN, setting REGNUM to the
   register number and executing CODE for all registers that are set.  */
#define EXECUTE_IF_SET_IN_REG_SET(REGSET, MIN, REGNUM, RSI) \
  EXECUTE_IF_SET_IN_BITMAP (REGSET, MIN, REGNUM, RSI)

/* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
   REGNUM to the register number and executing CODE for all registers that are
   set in the first regset and not set in the second.  */
#define EXECUTE_IF_AND_COMPL_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
  EXECUTE_IF_AND_COMPL_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)

/* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
   REGNUM to the register number and executing CODE for all registers that are
   set in both regsets.  */
#define EXECUTE_IF_AND_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
  EXECUTE_IF_AND_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)

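/* Usage sketch (illustrative; `live', `regno' and `use' are hypothetical
   names, not part of this interface).  A pass that records a set of
   register numbers and later walks it might do:

     regset live = ALLOC_REG_SET (&reg_obstack);
     unsigned regno;
     reg_set_iterator rsi;

     SET_REGNO_REG_SET (live, 42);
     EXECUTE_IF_SET_IN_REG_SET (live, 0, regno, rsi)
       {
         use (regno);
       }
     FREE_REG_SET (live);

   reg_obstack is declared further down in this file.  */
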
/* Type we use to hold basic block counters.  Should be at least
   64bit.  Although a counter cannot be negative, we use a signed
   type, because erroneous negative counts can be generated when the
   flow graph is manipulated by various optimizations.  A signed type
   makes those easy to detect.  */
typedef HOST_WIDEST_INT gcov_type;

/* Control flow edge information.  */
struct edge_def GTY(())
{
  /* The two blocks at the ends of the edge.  */
  struct basic_block_def *src;
  struct basic_block_def *dest;

  /* Instructions queued on the edge.  */
  union edge_def_insns {
    rtx GTY ((tag ("0"))) r;
    tree GTY ((tag ("1"))) t;
  } GTY ((desc ("ir_type ()"))) insns;

  /* Auxiliary info specific to a pass.  */
  PTR GTY ((skip (""))) aux;

  /* Location of any goto implicit in the edge, during tree-ssa.  */
  source_locus goto_locus;

  int flags;			/* see EDGE_* below  */
  int probability;		/* biased by REG_BR_PROB_BASE */
  gcov_type count;		/* Expected number of executions calculated
				   in profile.c  */

  /* The index number corresponding to this edge in the edge vector
     dest->preds.  */
  unsigned int dest_idx;
};

typedef struct edge_def *edge;
DEF_VEC_P(edge);
DEF_VEC_ALLOC_P(edge,gc);

#define EDGE_FALLTHRU		1	/* 'Straight line' flow */
#define EDGE_ABNORMAL		2	/* Strange flow, like computed
					   label, or eh */
#define EDGE_ABNORMAL_CALL	4	/* Call with abnormal exit
					   like an exception, or sibcall */
#define EDGE_EH			8	/* Exception throw */
#define EDGE_FAKE		16	/* Not a real edge (profile.c) */
#define EDGE_DFS_BACK		32	/* A backwards edge */
#define EDGE_CAN_FALLTHRU	64	/* Candidate for straight line
					   flow.  */
#define EDGE_IRREDUCIBLE_LOOP	128	/* Part of irreducible loop.  */
#define EDGE_SIBCALL		256	/* Edge from sibcall to exit.  */
#define EDGE_LOOP_EXIT		512	/* Exit of a loop.  */
#define EDGE_TRUE_VALUE		1024	/* Edge taken when controlling
					   predicate is nonzero.  */
#define EDGE_FALSE_VALUE	2048	/* Edge taken when controlling
					   predicate is zero.  */
#define EDGE_EXECUTABLE		4096	/* Edge is executable.  Only
					   valid during SSA-CCP.  */
#define EDGE_CROSSING		8192	/* Edge crosses between hot
					   and cold sections, when we
					   do partitioning.  */
#define EDGE_ALL_FLAGS		16383

#define EDGE_COMPLEX	(EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)

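/* Illustrative sketch (the helper name is hypothetical): passes usually
   test these bits directly on an edge, e.g. to reject anything that is
   not ordinary control flow in a single check:

     static bool
     ordinary_edge_p (edge e)
     {
       return (e->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0;
     }
*/
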
/* Counter summary from the last set of coverage counts read by
   profile.c.  */
extern const struct gcov_ctr_summary *profile_info;

/* Declared in cfgloop.h.  */
struct loop;
struct loops;

/* Declared in tree-flow.h.  */
struct edge_prediction;
struct rtl_bb_info;

/* A basic block is a sequence of instructions with only one entry and
   only one exit.  If any one of the instructions is executed, they
   will all be executed, and in sequence from first to last.

   There may be COND_EXEC instructions in the basic block.  The
   COND_EXEC *instructions* will be executed -- but if the condition
   is false the conditionally executed *expressions* will of course
   not be executed.  We don't consider the conditionally executed
   expression (which might have side-effects) to be in a separate
   basic block because the program counter will always be at the same
   location after the COND_EXEC instruction, regardless of whether the
   condition is true or not.

   Basic blocks need not start with a label nor end with a jump insn.
   For example, a previous basic block may just "conditionally fall"
   into the succeeding basic block, and the last basic block need not
   end with a jump insn.  Block 0 is a descendant of the entry block.

   A basic block beginning with two labels cannot have notes between
   the labels.

   Data for jump tables are stored in jump_insns that occur in no
   basic block even though these insns can follow or precede insns in
   basic blocks.  */

/* Basic block information indexed by block number.  */
struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")))
{
  /* Pointers to the first and last trees of the block.  */
  tree stmt_list;

  /* The edges into and out of the block.  */
  VEC(edge,gc) *preds;
  VEC(edge,gc) *succs;

  /* Auxiliary info specific to a pass.  */
  PTR GTY ((skip (""))) aux;

  /* Innermost loop containing the block.  */
  struct loop * GTY ((skip (""))) loop_father;

  /* The dominance and postdominance information node.  */
  struct et_node * GTY ((skip (""))) dom[2];

  /* Previous and next blocks in the chain.  */
  struct basic_block_def *prev_bb;
  struct basic_block_def *next_bb;

  union basic_block_il_dependent {
      struct rtl_bb_info * GTY ((tag ("1"))) rtl;
    } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;

  /* Chain of PHI nodes for this block.  */
  tree phi_nodes;

  /* A list of predictions.  */
  struct edge_prediction *predictions;

  /* Expected number of executions: calculated in profile.c.  */
  gcov_type count;

  /* The index of this block.  */
  int index;

  /* The loop depth of this block.  */
  int loop_depth;

  /* Expected frequency.  Normalized to be in range 0 to BB_FREQ_MAX.  */
  int frequency;

  /* Various flags.  See BB_* below.  */
  int flags;
};

struct rtl_bb_info GTY(())
{
  /* The first and last insns of the block.  */
  rtx head_;
  rtx end_;

  /* The registers that are live on entry to this block.  */
  bitmap GTY ((skip (""))) global_live_at_start;

  /* The registers that are live on exit from this block.  */
  bitmap GTY ((skip (""))) global_live_at_end;

  /* In CFG layout mode, points to insn notes/jumptables to be placed just
     before and after the block.  */
  rtx header;
  rtx footer;

  /* This field is used by the bb-reorder and tracer passes.  */
  int visited;
};

typedef struct basic_block_def *basic_block;

DEF_VEC_P(basic_block);
DEF_VEC_ALLOC_P(basic_block,gc);
DEF_VEC_ALLOC_P(basic_block,heap);

#define BB_FREQ_MAX 10000

/* Masks for basic_block.flags.

   BB_HOT_PARTITION and BB_COLD_PARTITION should be preserved throughout
   the compilation, so they are never cleared.

   All other flags may be cleared by clear_bb_flags ().  It is generally
   a bad idea to rely on any flags being up-to-date.  */

enum bb_flags
{

  /* Set if insns in BB are modified.  Used for updating liveness info.  */
  BB_DIRTY = 1,

  /* Only set on blocks that have just been created by create_bb.  */
  BB_NEW = 2,

  /* Set by find_unreachable_blocks.  Do not rely on this being set in any
     pass.  */
  BB_REACHABLE = 4,

  /* Set for blocks in an irreducible loop by loop analysis.  */
  BB_IRREDUCIBLE_LOOP = 8,

  /* Set on blocks that may actually not be single-entry, single-exit
     blocks.  */
  BB_SUPERBLOCK = 16,

  /* Set on basic blocks that the scheduler should not touch.  This is used
     by SMS to prevent other schedulers from messing with the loop schedule.  */
  BB_DISABLE_SCHEDULE = 32,

  /* Set on blocks that should be put in a hot section.  */
  BB_HOT_PARTITION = 64,

  /* Set on blocks that should be put in a cold section.  */
  BB_COLD_PARTITION = 128,

  /* Set on block that was duplicated.  */
  BB_DUPLICATED = 256,

  /* Set on blocks that are in RTL format.  */
  BB_RTL = 1024,

  /* Set on blocks that are forwarder blocks.
     Only used in cfgcleanup.c.  */
  BB_FORWARDER_BLOCK = 2048,

  /* Set on blocks that cannot be threaded through.
     Only used in cfgcleanup.c.  */
  BB_NONTHREADABLE_BLOCK = 4096
};

/* Dummy flag for convenience in the hot/cold partitioning code.  */
#define BB_UNPARTITIONED	0

/* Partitions, to be used when partitioning hot and cold basic blocks into
   separate sections.  */
#define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
#define BB_SET_PARTITION(bb, part) do {					\
  basic_block bb_ = (bb);						\
  bb_->flags = ((bb_->flags & ~(BB_HOT_PARTITION|BB_COLD_PARTITION))	\
		| (part));						\
} while (0)

#define BB_COPY_PARTITION(dstbb, srcbb) \
  BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))

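/* Illustrative sketch (new_bb/old_bb and the helper are hypothetical): a
   duplicated block should normally inherit the partition of its original,
   and later code can query it:

     BB_COPY_PARTITION (new_bb, old_bb);
     if (BB_PARTITION (new_bb) == BB_COLD_PARTITION)
       mark_unlikely_executed (new_bb);

   where mark_unlikely_executed stands for whatever the caller does with
   cold blocks.  */
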
/* A structure to group all the per-function control flow graph data.
   The x_* prefixing is necessary because otherwise references to the
   fields of this struct are interpreted as the defines for backward
   source compatibility following the definition of this struct.  */
struct control_flow_graph GTY(())
{
  /* Block pointers for the exit and entry of a function.
     These are always the head and tail of the basic block list.  */
  basic_block x_entry_block_ptr;
  basic_block x_exit_block_ptr;

  /* Index by basic block number, get basic block struct info.  */
  VEC(basic_block,gc) *x_basic_block_info;

  /* Number of basic blocks in this flow graph.  */
  int x_n_basic_blocks;

  /* Number of edges in this flow graph.  */
  int x_n_edges;

  /* The first free basic block number.  */
  int x_last_basic_block;

  /* Mapping of labels to their associated blocks.  At present
     only used for the tree CFG.  */
  VEC(basic_block,gc) *x_label_to_block_map;

  enum profile_status {
    PROFILE_ABSENT,
    PROFILE_GUESSED,
    PROFILE_READ
  } x_profile_status;
};

/* Defines for accessing the fields of the CFG structure for function FN.  */
#define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN)     ((FN)->cfg->x_entry_block_ptr)
#define EXIT_BLOCK_PTR_FOR_FUNCTION(FN)      ((FN)->cfg->x_exit_block_ptr)
#define basic_block_info_for_function(FN)    ((FN)->cfg->x_basic_block_info)
#define n_basic_blocks_for_function(FN)      ((FN)->cfg->x_n_basic_blocks)
#define n_edges_for_function(FN)             ((FN)->cfg->x_n_edges)
#define last_basic_block_for_function(FN)    ((FN)->cfg->x_last_basic_block)
#define label_to_block_map_for_function(FN)  ((FN)->cfg->x_label_to_block_map)

#define BASIC_BLOCK_FOR_FUNCTION(FN,N) \
  (VEC_index (basic_block, basic_block_info_for_function(FN), (N)))

/* Defines for textual backward source compatibility.  */
#define ENTRY_BLOCK_PTR		(cfun->cfg->x_entry_block_ptr)
#define EXIT_BLOCK_PTR		(cfun->cfg->x_exit_block_ptr)
#define basic_block_info	(cfun->cfg->x_basic_block_info)
#define n_basic_blocks		(cfun->cfg->x_n_basic_blocks)
#define n_edges			(cfun->cfg->x_n_edges)
#define last_basic_block	(cfun->cfg->x_last_basic_block)
#define label_to_block_map	(cfun->cfg->x_label_to_block_map)
#define profile_status		(cfun->cfg->x_profile_status)

#define BASIC_BLOCK(N)		(VEC_index (basic_block, basic_block_info, (N)))
#define SET_BASIC_BLOCK(N,BB)	(VEC_replace (basic_block, basic_block_info, (N), (BB)))

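/* Illustrative sketch (loop variables and `process' are hypothetical):
   block indices need not be contiguous, so an index-based walk has to
   skip NULL slots:

     basic_block bb;
     int i;

     for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
       {
         bb = BASIC_BLOCK (i);
         if (bb)
           process (bb);
       }

   NUM_FIXED_BLOCKS is defined further down in this file; the FOR_EACH_BB
   iterators below are usually more convenient.  */
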
/* For iterating over basic blocks.  */
#define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
  for (BB = FROM; BB != TO; BB = BB->DIR)

#define FOR_EACH_BB_FN(BB, FN) \
  FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)

#define FOR_EACH_BB(BB) FOR_EACH_BB_FN (BB, cfun)

#define FOR_EACH_BB_REVERSE_FN(BB, FN) \
  FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)

#define FOR_EACH_BB_REVERSE(BB) FOR_EACH_BB_REVERSE_FN(BB, cfun)

/* For iterating over insns in basic block.  */
#define FOR_BB_INSNS(BB, INSN)			\
  for ((INSN) = BB_HEAD (BB);			\
       (INSN) && (INSN) != NEXT_INSN (BB_END (BB));	\
       (INSN) = NEXT_INSN (INSN))

#define FOR_BB_INSNS_REVERSE(BB, INSN)		\
  for ((INSN) = BB_END (BB);			\
       (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB));	\
       (INSN) = PREV_INSN (INSN))

/* Cycles through _all_ basic blocks, even the fake ones (entry and
   exit block).  */

#define FOR_ALL_BB(BB) \
  for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)

#define FOR_ALL_BB_FN(BB, FN) \
  for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)

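/* Usage sketch (illustrative; `examine' is hypothetical): the usual way to
   visit every insn of the current function is to nest the block and insn
   iterators:

     basic_block bb;
     rtx insn;

     FOR_EACH_BB (bb)
       FOR_BB_INSNS (bb, insn)
         if (INSN_P (insn))
           examine (insn);
*/
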
extern bitmap_obstack reg_obstack;

/* Indexed by n, gives number of basic block that (REG n) is used in.
   If the value is REG_BLOCK_GLOBAL (-2),
   it means (REG n) is used in more than one basic block.
   REG_BLOCK_UNKNOWN (-1) means it hasn't been seen yet so we don't know.
   This information remains valid for the rest of the compilation
   of the current function; it is used to control register allocation.  */

#define REG_BLOCK_UNKNOWN -1
#define REG_BLOCK_GLOBAL -2

#define REG_BASIC_BLOCK(N)				\
  (VEC_index (reg_info_p, reg_n_info, N)->basic_block)

/* Stuff for recording basic block info.  */

#define BB_HEAD(B)      (B)->il.rtl->head_
#define BB_END(B)       (B)->il.rtl->end_

/* Special block numbers [markers] for entry and exit.  */
#define ENTRY_BLOCK (0)
#define EXIT_BLOCK (1)

/* The two blocks that are always in the cfg.  */
#define NUM_FIXED_BLOCKS (2)


#define BLOCK_NUM(INSN)	      (BLOCK_FOR_INSN (INSN)->index + 0)
#define set_block_for_insn(INSN, BB)  (BLOCK_FOR_INSN (INSN) = BB)

extern void compute_bb_for_insn (void);
extern unsigned int free_bb_for_insn (void);
extern void update_bb_for_insn (basic_block);

extern void free_basic_block_vars (void);

extern void insert_insn_on_edge (rtx, edge);

extern void commit_edge_insertions (void);
extern void commit_edge_insertions_watch_calls (void);

extern void remove_fake_edges (void);
extern void remove_fake_exit_edges (void);
extern void add_noreturn_fake_exit_edges (void);
extern void connect_infinite_loops_to_exit (void);
extern edge unchecked_make_edge (basic_block, basic_block, int);
extern edge cached_make_edge (sbitmap, basic_block, basic_block, int);
extern edge make_edge (basic_block, basic_block, int);
extern edge make_single_succ_edge (basic_block, basic_block, int);
extern void remove_edge (edge);
extern void redirect_edge_succ (edge, basic_block);
extern edge redirect_edge_succ_nodup (edge, basic_block);
extern void redirect_edge_pred (edge, basic_block);
extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
extern void clear_bb_flags (void);
extern int post_order_compute (int *, bool);
extern int pre_and_rev_post_order_compute (int *, int *, bool);
extern int dfs_enumerate_from (basic_block, int,
			       bool (*)(basic_block, void *),
			       basic_block *, int, void *);
extern void compute_dominance_frontiers (bitmap *);
extern void dump_bb_info (basic_block, bool, bool, int, const char *, FILE *);
extern void dump_edge_info (FILE *, edge, int);
extern void brief_dump_cfg (FILE *);
extern void clear_edges (void);
extern rtx first_insn_after_basic_block_note (basic_block);
extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
					     gcov_type);

/* Structure to group all of the information to process IF-THEN and
   IF-THEN-ELSE blocks for the conditional execution support.  This
   needs to be in a public file in case the IFCVT macros call
   functions passing the ce_if_block data structure.  */

typedef struct ce_if_block
{
  basic_block test_bb;			/* First test block.  */
  basic_block then_bb;			/* THEN block.  */
  basic_block else_bb;			/* ELSE block or NULL.  */
  basic_block join_bb;			/* Join THEN/ELSE blocks.  */
  basic_block last_test_bb;		/* Last bb to hold && or || tests.  */
  int num_multiple_test_blocks;		/* # of && and || basic blocks.  */
  int num_and_and_blocks;		/* # of && blocks.  */
  int num_or_or_blocks;			/* # of || blocks.  */
  int num_multiple_test_insns;		/* # of insns in && and || blocks.  */
  int and_and_p;			/* Complex test is &&.  */
  int num_then_insns;			/* # of insns in THEN block.  */
  int num_else_insns;			/* # of insns in ELSE block.  */
  int pass;				/* Pass number.  */

#ifdef IFCVT_EXTRA_FIELDS
  IFCVT_EXTRA_FIELDS			/* Any machine dependent fields.  */
#endif

} ce_if_block_t;

/* This structure maintains an edge list vector.  */
struct edge_list
{
  int num_blocks;
  int num_edges;
  edge *index_to_edge;
};

/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

/* This is the value which indicates no edge is present.  */
#define EDGE_INDEX_NO_EDGE	-1

/* EDGE_INDEX returns an integer index for an edge, or EDGE_INDEX_NO_EDGE
   if there is no edge between the 2 basic blocks.  */
#define EDGE_INDEX(el, pred, succ) (find_edge_index ((el), (pred), (succ)))

/* INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB return a pointer to the basic
   block which is either the pred or succ end of the indexed edge.  */
#define INDEX_EDGE_PRED_BB(el, index)	((el)->index_to_edge[(index)]->src)
#define INDEX_EDGE_SUCC_BB(el, index)	((el)->index_to_edge[(index)]->dest)

/* INDEX_EDGE returns a pointer to the edge.  */
#define INDEX_EDGE(el, index)           ((el)->index_to_edge[(index)])

/* Number of edges in the compressed edge list.  */
#define NUM_EDGES(el)			((el)->num_edges)

/* BB is assumed to contain a conditional jump.  Return the fallthru edge.  */
#define FALLTHRU_EDGE(bb)		(EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
					 ? EDGE_SUCC ((bb), 0) : EDGE_SUCC ((bb), 1))

/* BB is assumed to contain a conditional jump.  Return the branch edge.  */
#define BRANCH_EDGE(bb)			(EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
					 ? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))

/* Return expected execution frequency of the edge E.  */
#define EDGE_FREQUENCY(e)		(((e)->src->frequency \
					  * (e)->probability \
					  + REG_BR_PROB_BASE / 2) \
					 / REG_BR_PROB_BASE)

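/* Worked example (illustrative): with REG_BR_PROB_BASE == 10000, an edge
   whose source block has frequency 1200 and whose probability is 2500
   (i.e. 25%) gets EDGE_FREQUENCY == (1200 * 2500 + 5000) / 10000 == 300,
   roughly a quarter of the executions of the source block.  */
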
/* Return nonzero if edge is critical.  */
#define EDGE_CRITICAL_P(e)		(EDGE_COUNT ((e)->src->succs) >= 2 \
					 && EDGE_COUNT ((e)->dest->preds) >= 2)

#define EDGE_COUNT(ev)			VEC_length (edge, (ev))
#define EDGE_I(ev,i)			VEC_index  (edge, (ev), (i))
#define EDGE_PRED(bb,i)			VEC_index  (edge, (bb)->preds, (i))
#define EDGE_SUCC(bb,i)			VEC_index  (edge, (bb)->succs, (i))

/* Returns true if BB has precisely one successor.  */

static inline bool
single_succ_p (basic_block bb)
{
  return EDGE_COUNT (bb->succs) == 1;
}

/* Returns true if BB has precisely one predecessor.  */

static inline bool
single_pred_p (basic_block bb)
{
  return EDGE_COUNT (bb->preds) == 1;
}

/* Returns the single successor edge of basic block BB.  Aborts if
   BB does not have exactly one successor.  */

static inline edge
single_succ_edge (basic_block bb)
{
  gcc_assert (single_succ_p (bb));
  return EDGE_SUCC (bb, 0);
}

/* Returns the single predecessor edge of basic block BB.  Aborts
   if BB does not have exactly one predecessor.  */

static inline edge
single_pred_edge (basic_block bb)
{
  gcc_assert (single_pred_p (bb));
  return EDGE_PRED (bb, 0);
}

/* Returns the single successor block of basic block BB.  Aborts
   if BB does not have exactly one successor.  */

static inline basic_block
single_succ (basic_block bb)
{
  return single_succ_edge (bb)->dest;
}

/* Returns the single predecessor block of basic block BB.  Aborts
   if BB does not have exactly one predecessor.  */

static inline basic_block
single_pred (basic_block bb)
{
  return single_pred_edge (bb)->src;
}

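/* Usage sketch (illustrative; merge_candidate is hypothetical): a pass
   that only handles straight-line regions can guard itself with these
   predicates before dereferencing:

     if (single_succ_p (bb)
         && single_pred_p (single_succ (bb)))
       merge_candidate = single_succ (bb);

   single_succ_edge and single_pred_edge assert the corresponding
   predicate, so test it first unless exactly one edge is guaranteed.  */
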
/* Iterator object for edges.  */

typedef struct {
  unsigned index;
  VEC(edge,gc) **container;
} edge_iterator;

static inline VEC(edge,gc) *
ei_container (edge_iterator i)
{
  gcc_assert (i.container);
  return *i.container;
}

#define ei_start(iter) ei_start_1 (&(iter))
#define ei_last(iter) ei_last_1 (&(iter))

/* Return an iterator pointing to the start of an edge vector.  */
static inline edge_iterator
ei_start_1 (VEC(edge,gc) **ev)
{
  edge_iterator i;

  i.index = 0;
  i.container = ev;

  return i;
}

/* Return an iterator pointing to the last element of an edge
   vector.  */
static inline edge_iterator
ei_last_1 (VEC(edge,gc) **ev)
{
  edge_iterator i;

  i.index = EDGE_COUNT (*ev) - 1;
  i.container = ev;

  return i;
}

/* Is the iterator `i' at the end of the sequence?  */
static inline bool
ei_end_p (edge_iterator i)
{
  return (i.index == EDGE_COUNT (ei_container (i)));
}

/* Is the iterator `i' at one position before the end of the
   sequence?  */
static inline bool
ei_one_before_end_p (edge_iterator i)
{
  return (i.index + 1 == EDGE_COUNT (ei_container (i)));
}

/* Advance the iterator to the next element.  */
static inline void
ei_next (edge_iterator *i)
{
  gcc_assert (i->index < EDGE_COUNT (ei_container (*i)));
  i->index++;
}

/* Move the iterator to the previous element.  */
static inline void
ei_prev (edge_iterator *i)
{
  gcc_assert (i->index > 0);
  i->index--;
}

/* Return the edge pointed to by the iterator `i'.  */
static inline edge
ei_edge (edge_iterator i)
{
  return EDGE_I (ei_container (i), i.index);
}

/* Return an edge pointed to by the iterator.  Do it safely so that
   NULL is returned when the iterator is pointing at the end of the
   sequence.  */
static inline edge
ei_safe_edge (edge_iterator i)
{
  return !ei_end_p (i) ? ei_edge (i) : NULL;
}

/* Return 1 if we should continue to iterate.  Return 0 otherwise.
   *Edge P is set to the next edge if we are to continue to iterate
   and NULL otherwise.  */

static inline bool
ei_cond (edge_iterator ei, edge *p)
{
  if (!ei_end_p (ei))
    {
      *p = ei_edge (ei);
      return 1;
    }
  else
    {
      *p = NULL;
      return 0;
    }
}

/* This macro serves as a convenient way to iterate each edge in a
   vector of predecessor or successor edges.  It must not be used when
   an element might be removed during the traversal, otherwise
   elements will be missed.  Instead, use a for-loop like that shown
   in the following pseudo-code:

     FOR (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
       {
	 IF (e != taken_edge)
	   remove_edge (e);
	 ELSE
	   ei_next (&ei);
       }
*/

#define FOR_EACH_EDGE(EDGE,ITER,EDGE_VEC)	\
  for ((ITER) = ei_start ((EDGE_VEC));		\
       ei_cond ((ITER), &(EDGE));		\
       ei_next (&(ITER)))

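/* Usage sketch (illustrative; the accumulation is hypothetical): summing
   the profile counts flowing into a block looks like

     edge e;
     edge_iterator ei;
     gcov_type total = 0;

     FOR_EACH_EDGE (e, ei, bb->preds)
       total += e->count;

   Use the ei_safe_edge loop shown above instead if edges may be removed
   while iterating.  */
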
struct edge_list * create_edge_list (void);
void free_edge_list (struct edge_list *);
void print_edge_list (FILE *, struct edge_list *);
void verify_edge_list (FILE *, struct edge_list *);
int find_edge_index (struct edge_list *, basic_block, basic_block);
edge find_edge (basic_block, basic_block);


enum update_life_extent
{
  UPDATE_LIFE_LOCAL = 0,
  UPDATE_LIFE_GLOBAL = 1,
  UPDATE_LIFE_GLOBAL_RM_NOTES = 2
};

/* Flags for life_analysis and update_life_info.  */

#define PROP_DEATH_NOTES	1	/* Create DEAD and UNUSED notes.  */
#define PROP_LOG_LINKS		2	/* Create LOG_LINKS.  */
#define PROP_REG_INFO		4	/* Update regs_ever_live et al.  */
#define PROP_KILL_DEAD_CODE	8	/* Remove dead code.  */
#define PROP_SCAN_DEAD_CODE	16	/* Scan for dead code.  */
#define PROP_ALLOW_CFG_CHANGES	32	/* Allow the CFG to be changed
					   by dead code removal.  */
#define PROP_AUTOINC		64	/* Create autoinc mem references.  */
#define PROP_SCAN_DEAD_STORES	128	/* Scan for dead code.  */
#define PROP_ASM_SCAN		256	/* Internal flag used within flow.c
					   to flag analysis of asms.  */
#define PROP_DEAD_INSN		1024	/* Internal flag used within flow.c
					   to flag analysis of dead insn.  */
#define PROP_POST_REGSTACK	2048	/* We run after reg-stack and need
					   to preserve REG_DEAD notes for
					   stack regs.  */
#define PROP_FINAL		(PROP_DEATH_NOTES | PROP_LOG_LINKS  \
				 | PROP_REG_INFO | PROP_KILL_DEAD_CODE  \
				 | PROP_SCAN_DEAD_CODE | PROP_AUTOINC \
				 | PROP_ALLOW_CFG_CHANGES \
				 | PROP_SCAN_DEAD_STORES)
#define PROP_POSTRELOAD		(PROP_DEATH_NOTES  \
				 | PROP_KILL_DEAD_CODE  \
				 | PROP_SCAN_DEAD_CODE \
				 | PROP_SCAN_DEAD_STORES)

#define CLEANUP_EXPENSIVE	1	/* Do relatively expensive
					   optimizations except for edge
					   forwarding.  */
#define CLEANUP_CROSSJUMP	2	/* Do crossjumping.  */
#define CLEANUP_POST_REGSTACK	4	/* We run after reg-stack and need
					   to take care of REG_DEAD notes.  */
#define CLEANUP_UPDATE_LIFE	8	/* Keep life information up to date.  */
#define CLEANUP_THREADING	16	/* Do jump threading.  */
#define CLEANUP_NO_INSN_DEL	32	/* Do not try to delete trivially dead
					   insns.  */
#define CLEANUP_CFGLAYOUT	64	/* Do cleanup in cfglayout mode.  */
#define CLEANUP_LOG_LINKS	128	/* Update log links.  */

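/* Illustrative note: callers OR these bits together, e.g. a hypothetical
   late cleanup might request

     cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_CROSSJUMP);

   cleanup_cfg itself is declared further down in this file.  */
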
/* The following are ORed in on top of the CLEANUP* flags in calls to
   struct_equiv_block_eq.  */
#define STRUCT_EQUIV_START	256	 /* Initializes the search range.  */
#define STRUCT_EQUIV_RERUN	512	/* Rerun to find register use in
					   found equivalence.  */
#define STRUCT_EQUIV_FINAL	1024	/* Make any changes necessary to get
					   actual equivalence.  */
#define STRUCT_EQUIV_NEED_FULL_BLOCK 2048 /* struct_equiv_block_eq is required
					     to match only full blocks  */
#define STRUCT_EQUIV_MATCH_JUMPS 4096	/* Also include the jumps at the end of the block in the comparison.  */

extern void life_analysis (int);
extern int update_life_info (sbitmap, enum update_life_extent, int);
extern int update_life_info_in_dirty_blocks (enum update_life_extent, int);
extern int count_or_remove_death_notes (sbitmap, int);
extern int propagate_block (basic_block, regset, regset, regset, int);

struct propagate_block_info;
extern rtx propagate_one_insn (struct propagate_block_info *, rtx);
extern struct propagate_block_info *init_propagate_block_info
  (basic_block, regset, regset, regset, int);
extern void free_propagate_block_info (struct propagate_block_info *);

/* In lcm.c */
extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,
				       sbitmap *, sbitmap *, sbitmap **,
				       sbitmap **);
extern struct edge_list *pre_edge_rev_lcm (int, sbitmap *,
					   sbitmap *, sbitmap *,
					   sbitmap *, sbitmap **,
					   sbitmap **);
extern void compute_available (sbitmap *, sbitmap *, sbitmap *, sbitmap *);

/* In predict.c */
extern void expected_value_to_br_prob (void);
extern bool maybe_hot_bb_p (basic_block);
extern bool probably_cold_bb_p (basic_block);
extern bool probably_never_executed_bb_p (basic_block);
extern bool tree_predicted_by_p (basic_block, enum br_predictor);
extern bool rtl_predicted_by_p (basic_block, enum br_predictor);
extern void tree_predict_edge (edge, enum br_predictor, int);
extern void rtl_predict_edge (edge, enum br_predictor, int);
extern void predict_edge_def (edge, enum br_predictor, enum prediction);
extern void guess_outgoing_edge_probabilities (basic_block);
extern void remove_predictions_associated_with_edge (edge);
extern bool edge_probability_reliable_p (edge);
extern bool br_prob_note_reliable_p (rtx);

/* In flow.c */
extern void init_flow (void);
extern void debug_bb (basic_block);
extern basic_block debug_bb_n (int);
extern void dump_regset (regset, FILE *);
extern void debug_regset (regset);
extern void allocate_reg_life_data (void);
extern void expunge_block (basic_block);
extern void link_block (basic_block, basic_block);
extern void unlink_block (basic_block);
extern void compact_blocks (void);
extern basic_block alloc_block (void);
extern void find_unreachable_blocks (void);
extern int delete_noop_moves (void);
extern basic_block force_nonfallthru (edge);
extern rtx block_label (basic_block);
extern bool forwarder_block_p (basic_block);
extern bool purge_all_dead_edges (void);
extern bool purge_dead_edges (basic_block);
extern void find_many_sub_basic_blocks (sbitmap);
extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
extern bool can_fallthru (basic_block, basic_block);
extern bool could_fall_through (basic_block, basic_block);
extern void flow_nodes_print (const char *, const sbitmap, FILE *);
extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
extern void alloc_aux_for_block (basic_block, int);
extern void alloc_aux_for_blocks (int);
extern void clear_aux_for_blocks (void);
extern void free_aux_for_blocks (void);
extern void alloc_aux_for_edge (edge, int);
extern void alloc_aux_for_edges (int);
extern void clear_aux_for_edges (void);
extern void free_aux_for_edges (void);
extern void find_basic_blocks (rtx);
extern bool cleanup_cfg (int);
extern bool delete_unreachable_blocks (void);
extern bool merge_seq_blocks (void);

typedef struct conflict_graph_def *conflict_graph;

/* Callback function when enumerating conflicts.  The arguments are
   the smaller and larger regno in the conflict.  Returns zero if
   enumeration is to continue, nonzero to halt enumeration.  */
typedef int (*conflict_graph_enum_fn) (int, int, void *);


/* Prototypes of operations on conflict graphs.  */

extern conflict_graph conflict_graph_new
  (int);
extern void conflict_graph_delete (conflict_graph);
extern int conflict_graph_add (conflict_graph, int, int);
extern int conflict_graph_conflict_p (conflict_graph, int, int);
extern void conflict_graph_enum (conflict_graph, int, conflict_graph_enum_fn,
				 void *);
extern void conflict_graph_merge_regs (conflict_graph, int, int);
extern void conflict_graph_print (conflict_graph, FILE*);
extern bool mark_dfs_back_edges (void);
extern void set_edge_can_fallthru_flag (void);
extern void update_br_prob_note (basic_block);
extern void fixup_abnormal_edges (void);
extern bool inside_basic_block_p (rtx);
extern bool control_flow_insn_p (rtx);
extern rtx get_last_bb_insn (basic_block);

/* In bb-reorder.c */
extern void reorder_basic_blocks (unsigned int);

/* In dominance.c */

enum cdi_direction
{
  CDI_DOMINATORS,
  CDI_POST_DOMINATORS
};

enum dom_state
{
  DOM_NONE,		/* Not computed at all.  */
  DOM_NO_FAST_QUERY,	/* The data is OK, but the fast query data are not usable.  */
  DOM_OK		/* Everything is ok.  */
};

extern enum dom_state dom_computed[2];

extern bool dom_info_available_p (enum cdi_direction);
extern void calculate_dominance_info (enum cdi_direction);
extern void free_dominance_info (enum cdi_direction);
extern basic_block nearest_common_dominator (enum cdi_direction,
					     basic_block, basic_block);
extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
						     bitmap);
extern void set_immediate_dominator (enum cdi_direction, basic_block,
				     basic_block);
extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
extern bool dominated_by_p (enum cdi_direction, basic_block, basic_block);
extern int get_dominated_by (enum cdi_direction, basic_block, basic_block **);
extern unsigned get_dominated_by_region (enum cdi_direction, basic_block *,
					 unsigned, basic_block *);
extern void add_to_dominance_info (enum cdi_direction, basic_block);
extern void delete_from_dominance_info (enum cdi_direction, basic_block);
basic_block recount_dominator (enum cdi_direction, basic_block);
extern void redirect_immediate_dominators (enum cdi_direction, basic_block,
					   basic_block);
extern void iterate_fix_dominators (enum cdi_direction, basic_block *, int);
extern void verify_dominators (enum cdi_direction);
extern basic_block first_dom_son (enum cdi_direction, basic_block);
extern basic_block next_dom_son (enum cdi_direction, basic_block);
unsigned bb_dom_dfs_in (enum cdi_direction, basic_block);
unsigned bb_dom_dfs_out (enum cdi_direction, basic_block);

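/* Usage sketch (illustrative; bb1/bb2 and the helper are hypothetical):
   dominance queries require the information to have been computed first:

     calculate_dominance_info (CDI_DOMINATORS);
     if (dominated_by_p (CDI_DOMINATORS, bb2, bb1))
       handle_dominated_block (bb2);
     free_dominance_info (CDI_DOMINATORS);

   dominated_by_p returns true here when bb1 dominates bb2;
   handle_dominated_block stands for the pass-specific work.  */
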
extern edge try_redirect_by_replacing_jump (edge, basic_block, bool);
extern void break_superblocks (void);
extern void check_bb_profile (basic_block, FILE *);
extern void update_bb_profile_for_threading (basic_block, int, gcov_type, edge);
extern void init_rtl_bb_info (basic_block);

extern void initialize_original_copy_tables (void);
extern void free_original_copy_tables (void);
extern void set_bb_original (basic_block, basic_block);
extern basic_block get_bb_original (basic_block);
extern void set_bb_copy (basic_block, basic_block);
extern basic_block get_bb_copy (basic_block);

extern rtx insert_insn_end_bb_new (rtx, basic_block);

#include "cfghooks.h"

/* In struct-equiv.c */

/* Constants used to size arrays in struct equiv_info (currently only one).
   When these limits are exceeded, struct_equiv returns zero.
   The maximum number of pseudo registers that are different in the two blocks,
   but appear in equivalent places and are dead at the end (or where one of
   a pair is dead at the end).  */
#define STRUCT_EQUIV_MAX_LOCAL 16
/* The maximum number of references to an input register that struct_equiv
   can handle.  */

/* Structure used to track state during struct_equiv that can be rolled
   back when we find we can't match an insn, or if we want to match part
   of it in a different way.
   This information pertains to the pair of partial blocks that has been
   matched so far.  Since this pair is structurally equivalent, this is
   conceptually just one partial block expressed in two potentially
   different ways.  */
struct struct_equiv_checkpoint
{
  int ninsns;	/* Insns matched so far.  */
  int local_count;	/* Number of block-local registers.  */
  int input_count;	/* Number of inputs to the block.  */

  /* X_START and Y_START are the first insns (in insn stream order)
     of the partial blocks that have been considered for matching so far.
     Since we are scanning backwards, they are also the instructions that
     are currently considered - or the last ones that have been considered -
     for matching (Unless we tracked back to these because a preceding
     instruction failed to match).  */
  rtx x_start, y_start;

  /* INPUT_VALID indicates if we have actually set up X_INPUT / Y_INPUT
     during the current pass; we keep X_INPUT / Y_INPUT around between passes
     so that we can match REG_EQUAL / REG_EQUIV notes referring to these.  */
  bool input_valid;

  /* Some information would be expensive to exactly checkpoint, so we
     merely increment VERSION any time information about local
     registers, inputs and/or register liveness changes.  When backtracking,
     it is decremented for changes that can be undone, and if a discrepancy
     remains, NEED_RERUN in the relevant struct equiv_info is set to indicate
     that a new pass should be made over the entire block match to get
     accurate register information.  */
  int version;
};

/* A struct equiv_info is used to pass information to struct_equiv and
   to gather state while two basic blocks are checked for structural
   equivalence.  */

struct equiv_info
{
  /* Fields set up by the caller to struct_equiv_block_eq */

  basic_block x_block, y_block;  /* The two blocks being matched.  */

  /* MODE carries the mode bits from cleanup_cfg if we are called from
     try_crossjump_to_edge, and additionally it carries the
     STRUCT_EQUIV_* bits described above.  */
  int mode;

  /* INPUT_COST is the cost that adding an extra input to the matched blocks
     is supposed to have, and is taken into account when considering if the
     matched sequence should be extended backwards.  input_cost < 0 means
     don't accept any inputs at all.  */
  int input_cost;


  /* Fields to track state inside of struct_equiv_block_eq.  Some of these
     are also outputs.  */

  /* X_INPUT and Y_INPUT are used by struct_equiv to record a register that
     is used as an input parameter, i.e. where different registers are used
     as sources.  This is only used for a register that is live at the end
     of the blocks, or in some identical code at the end of the blocks;
     Inputs that are dead at the end go into X_LOCAL / Y_LOCAL.  */
  rtx x_input, y_input;
  /* When a previous pass has identified a valid input, INPUT_REG is set
     by struct_equiv_block_eq, and it is henceforth replaced in X_BLOCK
     for the input.  */
  rtx input_reg;

  /* COMMON_LIVE keeps track of the registers which are currently live
     (as we scan backwards from the end) and have the same numbers in both
     blocks.  N.B. a register that is in common_live is unsuitable to become
     a local reg.  */
  regset common_live;
  /* Likewise, X_LOCAL_LIVE / Y_LOCAL_LIVE keep track of registers that are
     local to one of the blocks; these registers must not be accepted as
     identical when encountered in both blocks.  */
  regset x_local_live, y_local_live;

  /* EQUIV_USED indicates for which insns a REG_EQUAL or REG_EQUIV note is
     being used, to avoid having to backtrack in the next pass, so that we
     get accurate life info for this insn then.  For each such insn,
     the bit with the number corresponding to the CUR.NINSNS value at the
     time of scanning is set.  */
  bitmap equiv_used;

  /* Current state that can be saved & restored easily.  */
  struct struct_equiv_checkpoint cur;
  /* BEST_MATCH is used to store the best match so far, weighing the
     cost of matched insns COSTS_N_INSNS (CUR.NINSNS) against the cost
     CUR.INPUT_COUNT * INPUT_COST of setting up the inputs.  */
  struct struct_equiv_checkpoint best_match;
  /* If a checkpoint restore failed, or an input conflict newly arises,
     NEED_RERUN is set.  This has to be tested by the caller to re-run
     the comparison if the match appears otherwise sound.  The state kept in
     x_start, y_start, equiv_used and check_input_conflict ensures that
     we won't loop indefinitely.  */
  bool need_rerun;
  /* If there is indication of an input conflict at the end,
     CHECK_INPUT_CONFLICT is set so that we'll check for input conflicts
     for each insn in the next pass.  This is needed so that we won't discard
     a partial match if there is a longer match that has to be abandoned due
     to an input conflict.  */
  bool check_input_conflict;
  /* HAD_INPUT_CONFLICT is set if CHECK_INPUT_CONFLICT was already set and we
     have passed a point where there were multiple dying inputs.  This helps
     us decide if we should set check_input_conflict for the next pass.  */
  bool had_input_conflict;

  /* LIVE_UPDATE controls if we want to change any life info at all.  We
     set it to false during REG_EQUAL / REG_EQUIV note comparison of the final
     pass so that we don't introduce new registers just for the note; if we
     can't match the notes without the current register information, we drop
     them.  */
  bool live_update;

  /* X_LOCAL and Y_LOCAL are used to gather register numbers of register pairs
     that are local to X_BLOCK and Y_BLOCK, with CUR.LOCAL_COUNT being the index
     to the next free entry.  */
  rtx x_local[STRUCT_EQUIV_MAX_LOCAL], y_local[STRUCT_EQUIV_MAX_LOCAL];
  /* LOCAL_RVALUE is nonzero if the corresponding X_LOCAL / Y_LOCAL entry
     was a source operand (including STRICT_LOW_PART) for the last invocation
     of struct_equiv mentioning it, zero if it was a destination-only operand.
     Since we are scanning backwards, this means the register is input/local
     for the (partial) block scanned so far.  */
  bool local_rvalue[STRUCT_EQUIV_MAX_LOCAL];


  /* Additional fields that are computed for the convenience of the caller.  */

  /* DYING_INPUTS is set to the number of local registers that turn out
     to be inputs to the (possibly partial) block.  */
  int dying_inputs;
  /* X_END and Y_END are the last insns in X_BLOCK and Y_BLOCK, respectively,
     that are being compared.  A final jump insn will not be included.  */
  rtx x_end, y_end;

  /* If we are matching tablejumps, X_LABEL in X_BLOCK corresponds to
     Y_LABEL in Y_BLOCK.  */
  rtx x_label, y_label;

};

extern bool insns_match_p (rtx, rtx, struct equiv_info *);
extern int struct_equiv_block_eq (int, struct equiv_info *);
extern bool struct_equiv_init (int, struct equiv_info *);
extern bool rtx_equiv_p (rtx *, rtx, int, struct equiv_info *);

/* In cfgrtl.c */
extern bool condjump_equiv_p (struct equiv_info *, bool);

/* Return true when one of the predecessor edges of BB is marked with EDGE_EH.  */
static inline bool bb_has_eh_pred (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
	return true;
    }
  return false;
}

#endif /* GCC_BASIC_BLOCK_H */