1 |
38 |
julius |
/* Control flow graph manipulation code for GNU compiler.
|
2 |
|
|
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
|
3 |
|
|
1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
|
4 |
|
|
|
5 |
|
|
This file is part of GCC.
|
6 |
|
|
|
7 |
|
|
GCC is free software; you can redistribute it and/or modify it under
|
8 |
|
|
the terms of the GNU General Public License as published by the Free
|
9 |
|
|
Software Foundation; either version 3, or (at your option) any later
|
10 |
|
|
version.
|
11 |
|
|
|
12 |
|
|
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
13 |
|
|
WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
14 |
|
|
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
15 |
|
|
for more details.
|
16 |
|
|
|
17 |
|
|
You should have received a copy of the GNU General Public License
|
18 |
|
|
along with GCC; see the file COPYING3. If not see
|
19 |
|
|
<http://www.gnu.org/licenses/>. */
|
20 |
|
|
|
21 |
|
|
/* This file contains low level functions to manipulate the CFG and analyze it
|
22 |
|
|
that are aware of the RTL intermediate language.
|
23 |
|
|
|
24 |
|
|
Available functionality:
|
25 |
|
|
- Basic CFG/RTL manipulation API documented in cfghooks.h
|
26 |
|
|
- CFG-aware instruction chain manipulation
|
27 |
|
|
delete_insn, delete_insn_chain
|
28 |
|
|
- Edge splitting and committing to edges
|
29 |
|
|
insert_insn_on_edge, commit_edge_insertions
|
30 |
|
|
- CFG updating after insn simplification
|
31 |
|
|
purge_dead_edges, purge_all_dead_edges
|
32 |
|
|
|
33 |
|
|
Functions not supposed for generic use:
|
34 |
|
|
- Infrastructure to determine quickly basic block for insn
|
35 |
|
|
compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
|
36 |
|
|
- Edge redirection with updating and optimizing of insn chain
|
37 |
|
|
block_label, tidy_fallthru_edge, force_nonfallthru */
|
38 |
|
|
|
39 |
|
|
#include "config.h"
|
40 |
|
|
#include "system.h"
|
41 |
|
|
#include "coretypes.h"
|
42 |
|
|
#include "tm.h"
|
43 |
|
|
#include "tree.h"
|
44 |
|
|
#include "rtl.h"
|
45 |
|
|
#include "hard-reg-set.h"
|
46 |
|
|
#include "basic-block.h"
|
47 |
|
|
#include "regs.h"
|
48 |
|
|
#include "flags.h"
|
49 |
|
|
#include "output.h"
|
50 |
|
|
#include "function.h"
|
51 |
|
|
#include "except.h"
|
52 |
|
|
#include "toplev.h"
|
53 |
|
|
#include "tm_p.h"
|
54 |
|
|
#include "obstack.h"
|
55 |
|
|
#include "insn-config.h"
|
56 |
|
|
#include "cfglayout.h"
|
57 |
|
|
#include "expr.h"
|
58 |
|
|
#include "target.h"
|
59 |
|
|
#include "cfgloop.h"
|
60 |
|
|
#include "ggc.h"
|
61 |
|
|
#include "tree-pass.h"
|
62 |
|
|
|
63 |
|
|
static int can_delete_note_p (rtx);
|
64 |
|
|
static int can_delete_label_p (rtx);
|
65 |
|
|
static void commit_one_edge_insertion (edge, int);
|
66 |
|
|
static basic_block rtl_split_edge (edge);
|
67 |
|
|
static bool rtl_move_block_after (basic_block, basic_block);
|
68 |
|
|
static int rtl_verify_flow_info (void);
|
69 |
|
|
static basic_block cfg_layout_split_block (basic_block, void *);
|
70 |
|
|
static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
|
71 |
|
|
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
|
72 |
|
|
static void cfg_layout_delete_block (basic_block);
|
73 |
|
|
static void rtl_delete_block (basic_block);
|
74 |
|
|
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
|
75 |
|
|
static edge rtl_redirect_edge_and_branch (edge, basic_block);
|
76 |
|
|
static basic_block rtl_split_block (basic_block, void *);
|
77 |
|
|
static void rtl_dump_bb (basic_block, FILE *, int);
|
78 |
|
|
static int rtl_verify_flow_info_1 (void);
|
79 |
|
|
static void rtl_make_forwarder_block (edge);
|
80 |
|
|
|
81 |
|
|
/* Return true if NOTE is not one of the ones that must be kept paired,
|
82 |
|
|
so that we may simply delete it. */
|
83 |
|
|
|
84 |
|
|
static int
|
85 |
|
|
can_delete_note_p (rtx note)
|
86 |
|
|
{
|
87 |
|
|
return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
|
88 |
|
|
|| NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
|
89 |
|
|
}
|
90 |
|
|
|
91 |
|
|
/* True if a given label can be deleted. */
|
92 |
|
|
|
93 |
|
|
static int
|
94 |
|
|
can_delete_label_p (rtx label)
|
95 |
|
|
{
|
96 |
|
|
return (!LABEL_PRESERVE_P (label)
|
97 |
|
|
/* User declared labels must be preserved. */
|
98 |
|
|
&& LABEL_NAME (label) == 0
|
99 |
|
|
&& !in_expr_list_p (forced_labels, label));
|
100 |
|
|
}
|
101 |
|
|
|
102 |
|
|
/* Delete INSN by patching it out. Return the next insn. */
|
103 |
|
|
|
104 |
|
|
rtx
|
105 |
|
|
delete_insn (rtx insn)
|
106 |
|
|
{
|
107 |
|
|
rtx next = NEXT_INSN (insn);
|
108 |
|
|
rtx note;
|
109 |
|
|
bool really_delete = true;
|
110 |
|
|
|
111 |
|
|
if (LABEL_P (insn))
|
112 |
|
|
{
|
113 |
|
|
/* Some labels can't be directly removed from the INSN chain, as they
|
114 |
|
|
might be references via variables, constant pool etc.
|
115 |
|
|
Convert them to the special NOTE_INSN_DELETED_LABEL note. */
|
116 |
|
|
if (! can_delete_label_p (insn))
|
117 |
|
|
{
|
118 |
|
|
const char *name = LABEL_NAME (insn);
|
119 |
|
|
|
120 |
|
|
really_delete = false;
|
121 |
|
|
PUT_CODE (insn, NOTE);
|
122 |
|
|
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
|
123 |
|
|
NOTE_DELETED_LABEL_NAME (insn) = name;
|
124 |
|
|
}
|
125 |
|
|
|
126 |
|
|
remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
|
127 |
|
|
}
|
128 |
|
|
|
129 |
|
|
if (really_delete)
|
130 |
|
|
{
|
131 |
|
|
/* If this insn has already been deleted, something is very wrong. */
|
132 |
|
|
gcc_assert (!INSN_DELETED_P (insn));
|
133 |
|
|
remove_insn (insn);
|
134 |
|
|
INSN_DELETED_P (insn) = 1;
|
135 |
|
|
}
|
136 |
|
|
|
137 |
|
|
/* If deleting a jump, decrement the use count of the label. Deleting
|
138 |
|
|
the label itself should happen in the normal course of block merging. */
|
139 |
|
|
if (JUMP_P (insn)
|
140 |
|
|
&& JUMP_LABEL (insn)
|
141 |
|
|
&& LABEL_P (JUMP_LABEL (insn)))
|
142 |
|
|
LABEL_NUSES (JUMP_LABEL (insn))--;
|
143 |
|
|
|
144 |
|
|
/* Also if deleting an insn that references a label. */
|
145 |
|
|
else
|
146 |
|
|
{
|
147 |
|
|
while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
|
148 |
|
|
&& LABEL_P (XEXP (note, 0)))
|
149 |
|
|
{
|
150 |
|
|
LABEL_NUSES (XEXP (note, 0))--;
|
151 |
|
|
remove_note (insn, note);
|
152 |
|
|
}
|
153 |
|
|
}
|
154 |
|
|
|
155 |
|
|
if (JUMP_P (insn)
|
156 |
|
|
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|
157 |
|
|
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
|
158 |
|
|
{
|
159 |
|
|
rtx pat = PATTERN (insn);
|
160 |
|
|
int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
|
161 |
|
|
int len = XVECLEN (pat, diff_vec_p);
|
162 |
|
|
int i;
|
163 |
|
|
|
164 |
|
|
for (i = 0; i < len; i++)
|
165 |
|
|
{
|
166 |
|
|
rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
|
167 |
|
|
|
168 |
|
|
/* When deleting code in bulk (e.g. removing many unreachable
|
169 |
|
|
blocks) we can delete a label that's a target of the vector
|
170 |
|
|
before deleting the vector itself. */
|
171 |
|
|
if (!NOTE_P (label))
|
172 |
|
|
LABEL_NUSES (label)--;
|
173 |
|
|
}
|
174 |
|
|
}
|
175 |
|
|
|
176 |
|
|
return next;
|
177 |
|
|
}
|
178 |
|
|
|
179 |
|
|
/* Like delete_insn but also purge dead edges from BB. */
|
180 |
|
|
rtx
|
181 |
|
|
delete_insn_and_edges (rtx insn)
|
182 |
|
|
{
|
183 |
|
|
rtx x;
|
184 |
|
|
bool purge = false;
|
185 |
|
|
|
186 |
|
|
if (INSN_P (insn)
|
187 |
|
|
&& BLOCK_FOR_INSN (insn)
|
188 |
|
|
&& BB_END (BLOCK_FOR_INSN (insn)) == insn)
|
189 |
|
|
purge = true;
|
190 |
|
|
x = delete_insn (insn);
|
191 |
|
|
if (purge)
|
192 |
|
|
purge_dead_edges (BLOCK_FOR_INSN (insn));
|
193 |
|
|
return x;
|
194 |
|
|
}
|
195 |
|
|
|
196 |
|
|
/* Unlink a chain of insns between START and FINISH, leaving notes
|
197 |
|
|
that must be paired. */
|
198 |
|
|
|
199 |
|
|
void
|
200 |
|
|
delete_insn_chain (rtx start, rtx finish)
|
201 |
|
|
{
|
202 |
|
|
rtx next;
|
203 |
|
|
|
204 |
|
|
/* Unchain the insns one by one. It would be quicker to delete all of these
|
205 |
|
|
with a single unchaining, rather than one at a time, but we need to keep
|
206 |
|
|
the NOTE's. */
|
207 |
|
|
while (1)
|
208 |
|
|
{
|
209 |
|
|
next = NEXT_INSN (start);
|
210 |
|
|
if (NOTE_P (start) && !can_delete_note_p (start))
|
211 |
|
|
;
|
212 |
|
|
else
|
213 |
|
|
next = delete_insn (start);
|
214 |
|
|
|
215 |
|
|
if (start == finish)
|
216 |
|
|
break;
|
217 |
|
|
start = next;
|
218 |
|
|
}
|
219 |
|
|
}
|
220 |
|
|
|
221 |
|
|
/* Like delete_insn but also purge dead edges from BB. */
|
222 |
|
|
void
|
223 |
|
|
delete_insn_chain_and_edges (rtx first, rtx last)
|
224 |
|
|
{
|
225 |
|
|
bool purge = false;
|
226 |
|
|
|
227 |
|
|
if (INSN_P (last)
|
228 |
|
|
&& BLOCK_FOR_INSN (last)
|
229 |
|
|
&& BB_END (BLOCK_FOR_INSN (last)) == last)
|
230 |
|
|
purge = true;
|
231 |
|
|
delete_insn_chain (first, last);
|
232 |
|
|
if (purge)
|
233 |
|
|
purge_dead_edges (BLOCK_FOR_INSN (last));
|
234 |
|
|
}
|
235 |
|
|
|
236 |
|
|
/* Create a new basic block consisting of the instructions between HEAD and END
|
237 |
|
|
inclusive. This function is designed to allow fast BB construction - reuses
|
238 |
|
|
the note and basic block struct in BB_NOTE, if any and do not grow
|
239 |
|
|
BASIC_BLOCK chain and should be used directly only by CFG construction code.
|
240 |
|
|
END can be NULL in to create new empty basic block before HEAD. Both END
|
241 |
|
|
and HEAD can be NULL to create basic block at the end of INSN chain.
|
242 |
|
|
AFTER is the basic block we should be put after. */
|
243 |
|
|
|
244 |
|
|
basic_block
|
245 |
|
|
create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
|
246 |
|
|
{
|
247 |
|
|
basic_block bb;
|
248 |
|
|
|
249 |
|
|
if (bb_note
|
250 |
|
|
&& (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
|
251 |
|
|
&& bb->aux == NULL)
|
252 |
|
|
{
|
253 |
|
|
/* If we found an existing note, thread it back onto the chain. */
|
254 |
|
|
|
255 |
|
|
rtx after;
|
256 |
|
|
|
257 |
|
|
if (LABEL_P (head))
|
258 |
|
|
after = head;
|
259 |
|
|
else
|
260 |
|
|
{
|
261 |
|
|
after = PREV_INSN (head);
|
262 |
|
|
head = bb_note;
|
263 |
|
|
}
|
264 |
|
|
|
265 |
|
|
if (after != bb_note && NEXT_INSN (after) != bb_note)
|
266 |
|
|
reorder_insns_nobb (bb_note, bb_note, after);
|
267 |
|
|
}
|
268 |
|
|
else
|
269 |
|
|
{
|
270 |
|
|
/* Otherwise we must create a note and a basic block structure. */
|
271 |
|
|
|
272 |
|
|
bb = alloc_block ();
|
273 |
|
|
|
274 |
|
|
init_rtl_bb_info (bb);
|
275 |
|
|
if (!head && !end)
|
276 |
|
|
head = end = bb_note
|
277 |
|
|
= emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
|
278 |
|
|
else if (LABEL_P (head) && end)
|
279 |
|
|
{
|
280 |
|
|
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
|
281 |
|
|
if (head == end)
|
282 |
|
|
end = bb_note;
|
283 |
|
|
}
|
284 |
|
|
else
|
285 |
|
|
{
|
286 |
|
|
bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
|
287 |
|
|
head = bb_note;
|
288 |
|
|
if (!end)
|
289 |
|
|
end = head;
|
290 |
|
|
}
|
291 |
|
|
|
292 |
|
|
NOTE_BASIC_BLOCK (bb_note) = bb;
|
293 |
|
|
}
|
294 |
|
|
|
295 |
|
|
/* Always include the bb note in the block. */
|
296 |
|
|
if (NEXT_INSN (end) == bb_note)
|
297 |
|
|
end = bb_note;
|
298 |
|
|
|
299 |
|
|
BB_HEAD (bb) = head;
|
300 |
|
|
BB_END (bb) = end;
|
301 |
|
|
bb->index = last_basic_block++;
|
302 |
|
|
bb->flags = BB_NEW | BB_RTL;
|
303 |
|
|
link_block (bb, after);
|
304 |
|
|
SET_BASIC_BLOCK (bb->index, bb);
|
305 |
|
|
update_bb_for_insn (bb);
|
306 |
|
|
BB_SET_PARTITION (bb, BB_UNPARTITIONED);
|
307 |
|
|
|
308 |
|
|
/* Tag the block so that we know it has been used when considering
|
309 |
|
|
other basic block notes. */
|
310 |
|
|
bb->aux = bb;
|
311 |
|
|
|
312 |
|
|
return bb;
|
313 |
|
|
}
|
314 |
|
|
|
315 |
|
|
/* Create new basic block consisting of instructions in between HEAD and END
|
316 |
|
|
and place it to the BB chain after block AFTER. END can be NULL in to
|
317 |
|
|
create new empty basic block before HEAD. Both END and HEAD can be NULL to
|
318 |
|
|
create basic block at the end of INSN chain. */
|
319 |
|
|
|
320 |
|
|
static basic_block
|
321 |
|
|
rtl_create_basic_block (void *headp, void *endp, basic_block after)
|
322 |
|
|
{
|
323 |
|
|
rtx head = headp, end = endp;
|
324 |
|
|
basic_block bb;
|
325 |
|
|
|
326 |
|
|
/* Grow the basic block array if needed. */
|
327 |
|
|
if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
|
328 |
|
|
{
|
329 |
|
|
size_t old_size = VEC_length (basic_block, basic_block_info);
|
330 |
|
|
size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
|
331 |
|
|
basic_block *p;
|
332 |
|
|
VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
|
333 |
|
|
p = VEC_address (basic_block, basic_block_info);
|
334 |
|
|
memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
|
335 |
|
|
}
|
336 |
|
|
|
337 |
|
|
n_basic_blocks++;
|
338 |
|
|
|
339 |
|
|
bb = create_basic_block_structure (head, end, NULL, after);
|
340 |
|
|
bb->aux = NULL;
|
341 |
|
|
return bb;
|
342 |
|
|
}
|
343 |
|
|
|
344 |
|
|
static basic_block
|
345 |
|
|
cfg_layout_create_basic_block (void *head, void *end, basic_block after)
|
346 |
|
|
{
|
347 |
|
|
basic_block newbb = rtl_create_basic_block (head, end, after);
|
348 |
|
|
|
349 |
|
|
return newbb;
|
350 |
|
|
}
|
351 |
|
|
|
352 |
|
|
/* Delete the insns in a (non-live) block. We physically delete every
|
353 |
|
|
non-deleted-note insn, and update the flow graph appropriately.
|
354 |
|
|
|
355 |
|
|
Return nonzero if we deleted an exception handler. */
|
356 |
|
|
|
357 |
|
|
/* ??? Preserving all such notes strikes me as wrong. It would be nice
|
358 |
|
|
to post-process the stream to remove empty blocks, loops, ranges, etc. */
|
359 |
|
|
|
360 |
|
|
static void
|
361 |
|
|
rtl_delete_block (basic_block b)
|
362 |
|
|
{
|
363 |
|
|
rtx insn, end;
|
364 |
|
|
|
365 |
|
|
/* If the head of this block is a CODE_LABEL, then it might be the
|
366 |
|
|
label for an exception handler which can't be reached. We need
|
367 |
|
|
to remove the label from the exception_handler_label list. */
|
368 |
|
|
insn = BB_HEAD (b);
|
369 |
|
|
if (LABEL_P (insn))
|
370 |
|
|
maybe_remove_eh_handler (insn);
|
371 |
|
|
|
372 |
|
|
end = get_last_bb_insn (b);
|
373 |
|
|
|
374 |
|
|
/* Selectively delete the entire chain. */
|
375 |
|
|
BB_HEAD (b) = NULL;
|
376 |
|
|
delete_insn_chain (insn, end);
|
377 |
|
|
if (b->il.rtl->global_live_at_start)
|
378 |
|
|
{
|
379 |
|
|
FREE_REG_SET (b->il.rtl->global_live_at_start);
|
380 |
|
|
FREE_REG_SET (b->il.rtl->global_live_at_end);
|
381 |
|
|
b->il.rtl->global_live_at_start = NULL;
|
382 |
|
|
b->il.rtl->global_live_at_end = NULL;
|
383 |
|
|
}
|
384 |
|
|
}
|
385 |
|
|
|
386 |
|
|
/* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
|
387 |
|
|
|
388 |
|
|
void
|
389 |
|
|
compute_bb_for_insn (void)
|
390 |
|
|
{
|
391 |
|
|
basic_block bb;
|
392 |
|
|
|
393 |
|
|
FOR_EACH_BB (bb)
|
394 |
|
|
{
|
395 |
|
|
rtx end = BB_END (bb);
|
396 |
|
|
rtx insn;
|
397 |
|
|
|
398 |
|
|
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
|
399 |
|
|
{
|
400 |
|
|
BLOCK_FOR_INSN (insn) = bb;
|
401 |
|
|
if (insn == end)
|
402 |
|
|
break;
|
403 |
|
|
}
|
404 |
|
|
}
|
405 |
|
|
}
|
406 |
|
|
|
407 |
|
|
/* Release the basic_block_for_insn array. */
|
408 |
|
|
|
409 |
|
|
unsigned int
|
410 |
|
|
free_bb_for_insn (void)
|
411 |
|
|
{
|
412 |
|
|
rtx insn;
|
413 |
|
|
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
|
414 |
|
|
if (!BARRIER_P (insn))
|
415 |
|
|
BLOCK_FOR_INSN (insn) = NULL;
|
416 |
|
|
return 0;
|
417 |
|
|
}
|
418 |
|
|
|
419 |
|
|
struct tree_opt_pass pass_free_cfg =
|
420 |
|
|
{
|
421 |
|
|
NULL, /* name */
|
422 |
|
|
NULL, /* gate */
|
423 |
|
|
free_bb_for_insn, /* execute */
|
424 |
|
|
NULL, /* sub */
|
425 |
|
|
NULL, /* next */
|
426 |
|
|
0, /* static_pass_number */
|
427 |
|
|
0, /* tv_id */
|
428 |
|
|
0, /* properties_required */
|
429 |
|
|
0, /* properties_provided */
|
430 |
|
|
PROP_cfg, /* properties_destroyed */
|
431 |
|
|
0, /* todo_flags_start */
|
432 |
|
|
0, /* todo_flags_finish */
|
433 |
|
|
|
434 |
|
|
};
|
435 |
|
|
|
436 |
|
|
/* Return RTX to emit after when we want to emit code on the entry of function. */
|
437 |
|
|
rtx
|
438 |
|
|
entry_of_function (void)
|
439 |
|
|
{
|
440 |
|
|
return (n_basic_blocks > NUM_FIXED_BLOCKS ?
|
441 |
|
|
BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
|
442 |
|
|
}
|
443 |
|
|
|
444 |
|
|
/* Emit INSN at the entry point of the function, ensuring that it is only
|
445 |
|
|
executed once per function. */
|
446 |
|
|
void
|
447 |
|
|
emit_insn_at_entry (rtx insn)
|
448 |
|
|
{
|
449 |
|
|
edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
|
450 |
|
|
edge e = ei_safe_edge (ei);
|
451 |
|
|
gcc_assert (e->flags & EDGE_FALLTHRU);
|
452 |
|
|
|
453 |
|
|
insert_insn_on_edge (insn, e);
|
454 |
|
|
commit_edge_insertions ();
|
455 |
|
|
}
|
456 |
|
|
|
457 |
|
|
/* Update insns block within BB. */
|
458 |
|
|
|
459 |
|
|
void
|
460 |
|
|
update_bb_for_insn (basic_block bb)
|
461 |
|
|
{
|
462 |
|
|
rtx insn;
|
463 |
|
|
|
464 |
|
|
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
|
465 |
|
|
{
|
466 |
|
|
if (!BARRIER_P (insn))
|
467 |
|
|
set_block_for_insn (insn, bb);
|
468 |
|
|
if (insn == BB_END (bb))
|
469 |
|
|
break;
|
470 |
|
|
}
|
471 |
|
|
}
|
472 |
|
|
|
473 |
|
|
/* Creates a new basic block just after basic block B by splitting
|
474 |
|
|
everything after specified instruction I. */
|
475 |
|
|
|
476 |
|
|
static basic_block
|
477 |
|
|
rtl_split_block (basic_block bb, void *insnp)
|
478 |
|
|
{
|
479 |
|
|
basic_block new_bb;
|
480 |
|
|
rtx insn = insnp;
|
481 |
|
|
edge e;
|
482 |
|
|
edge_iterator ei;
|
483 |
|
|
|
484 |
|
|
if (!insn)
|
485 |
|
|
{
|
486 |
|
|
insn = first_insn_after_basic_block_note (bb);
|
487 |
|
|
|
488 |
|
|
if (insn)
|
489 |
|
|
insn = PREV_INSN (insn);
|
490 |
|
|
else
|
491 |
|
|
insn = get_last_insn ();
|
492 |
|
|
}
|
493 |
|
|
|
494 |
|
|
/* We probably should check type of the insn so that we do not create
|
495 |
|
|
inconsistent cfg. It is checked in verify_flow_info anyway, so do not
|
496 |
|
|
bother. */
|
497 |
|
|
if (insn == BB_END (bb))
|
498 |
|
|
emit_note_after (NOTE_INSN_DELETED, insn);
|
499 |
|
|
|
500 |
|
|
/* Create the new basic block. */
|
501 |
|
|
new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
|
502 |
|
|
BB_COPY_PARTITION (new_bb, bb);
|
503 |
|
|
BB_END (bb) = insn;
|
504 |
|
|
|
505 |
|
|
/* Redirect the outgoing edges. */
|
506 |
|
|
new_bb->succs = bb->succs;
|
507 |
|
|
bb->succs = NULL;
|
508 |
|
|
FOR_EACH_EDGE (e, ei, new_bb->succs)
|
509 |
|
|
e->src = new_bb;
|
510 |
|
|
|
511 |
|
|
if (bb->il.rtl->global_live_at_start)
|
512 |
|
|
{
|
513 |
|
|
new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
|
514 |
|
|
new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
|
515 |
|
|
COPY_REG_SET (new_bb->il.rtl->global_live_at_end, bb->il.rtl->global_live_at_end);
|
516 |
|
|
|
517 |
|
|
/* We now have to calculate which registers are live at the end
|
518 |
|
|
of the split basic block and at the start of the new basic
|
519 |
|
|
block. Start with those registers that are known to be live
|
520 |
|
|
at the end of the original basic block and get
|
521 |
|
|
propagate_block to determine which registers are live. */
|
522 |
|
|
COPY_REG_SET (new_bb->il.rtl->global_live_at_start, bb->il.rtl->global_live_at_end);
|
523 |
|
|
propagate_block (new_bb, new_bb->il.rtl->global_live_at_start, NULL, NULL, 0);
|
524 |
|
|
COPY_REG_SET (bb->il.rtl->global_live_at_end,
|
525 |
|
|
new_bb->il.rtl->global_live_at_start);
|
526 |
|
|
#ifdef HAVE_conditional_execution
|
527 |
|
|
/* In the presence of conditional execution we are not able to update
|
528 |
|
|
liveness precisely. */
|
529 |
|
|
if (reload_completed)
|
530 |
|
|
{
|
531 |
|
|
bb->flags |= BB_DIRTY;
|
532 |
|
|
new_bb->flags |= BB_DIRTY;
|
533 |
|
|
}
|
534 |
|
|
#endif
|
535 |
|
|
}
|
536 |
|
|
|
537 |
|
|
return new_bb;
|
538 |
|
|
}
|
539 |
|
|
|
540 |
|
|
/* Blocks A and B are to be merged into a single block A. The insns
|
541 |
|
|
are already contiguous. */
|
542 |
|
|
|
543 |
|
|
static void
|
544 |
|
|
rtl_merge_blocks (basic_block a, basic_block b)
|
545 |
|
|
{
|
546 |
|
|
rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
|
547 |
|
|
rtx del_first = NULL_RTX, del_last = NULL_RTX;
|
548 |
|
|
int b_empty = 0;
|
549 |
|
|
|
550 |
|
|
/* If there was a CODE_LABEL beginning B, delete it. */
|
551 |
|
|
if (LABEL_P (b_head))
|
552 |
|
|
{
|
553 |
|
|
/* This might have been an EH label that no longer has incoming
|
554 |
|
|
EH edges. Update data structures to match. */
|
555 |
|
|
maybe_remove_eh_handler (b_head);
|
556 |
|
|
|
557 |
|
|
/* Detect basic blocks with nothing but a label. This can happen
|
558 |
|
|
in particular at the end of a function. */
|
559 |
|
|
if (b_head == b_end)
|
560 |
|
|
b_empty = 1;
|
561 |
|
|
|
562 |
|
|
del_first = del_last = b_head;
|
563 |
|
|
b_head = NEXT_INSN (b_head);
|
564 |
|
|
}
|
565 |
|
|
|
566 |
|
|
/* Delete the basic block note and handle blocks containing just that
|
567 |
|
|
note. */
|
568 |
|
|
if (NOTE_INSN_BASIC_BLOCK_P (b_head))
|
569 |
|
|
{
|
570 |
|
|
if (b_head == b_end)
|
571 |
|
|
b_empty = 1;
|
572 |
|
|
if (! del_last)
|
573 |
|
|
del_first = b_head;
|
574 |
|
|
|
575 |
|
|
del_last = b_head;
|
576 |
|
|
b_head = NEXT_INSN (b_head);
|
577 |
|
|
}
|
578 |
|
|
|
579 |
|
|
/* If there was a jump out of A, delete it. */
|
580 |
|
|
if (JUMP_P (a_end))
|
581 |
|
|
{
|
582 |
|
|
rtx prev;
|
583 |
|
|
|
584 |
|
|
for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
|
585 |
|
|
if (!NOTE_P (prev)
|
586 |
|
|
|| NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
|
587 |
|
|
|| prev == BB_HEAD (a))
|
588 |
|
|
break;
|
589 |
|
|
|
590 |
|
|
del_first = a_end;
|
591 |
|
|
|
592 |
|
|
#ifdef HAVE_cc0
|
593 |
|
|
/* If this was a conditional jump, we need to also delete
|
594 |
|
|
the insn that set cc0. */
|
595 |
|
|
if (only_sets_cc0_p (prev))
|
596 |
|
|
{
|
597 |
|
|
rtx tmp = prev;
|
598 |
|
|
|
599 |
|
|
prev = prev_nonnote_insn (prev);
|
600 |
|
|
if (!prev)
|
601 |
|
|
prev = BB_HEAD (a);
|
602 |
|
|
del_first = tmp;
|
603 |
|
|
}
|
604 |
|
|
#endif
|
605 |
|
|
|
606 |
|
|
a_end = PREV_INSN (del_first);
|
607 |
|
|
}
|
608 |
|
|
else if (BARRIER_P (NEXT_INSN (a_end)))
|
609 |
|
|
del_first = NEXT_INSN (a_end);
|
610 |
|
|
|
611 |
|
|
/* Delete everything marked above as well as crap that might be
|
612 |
|
|
hanging out between the two blocks. */
|
613 |
|
|
BB_HEAD (b) = NULL;
|
614 |
|
|
delete_insn_chain (del_first, del_last);
|
615 |
|
|
|
616 |
|
|
/* Reassociate the insns of B with A. */
|
617 |
|
|
if (!b_empty)
|
618 |
|
|
{
|
619 |
|
|
rtx x;
|
620 |
|
|
|
621 |
|
|
for (x = a_end; x != b_end; x = NEXT_INSN (x))
|
622 |
|
|
set_block_for_insn (x, a);
|
623 |
|
|
|
624 |
|
|
set_block_for_insn (b_end, a);
|
625 |
|
|
|
626 |
|
|
a_end = b_end;
|
627 |
|
|
}
|
628 |
|
|
|
629 |
|
|
BB_END (a) = a_end;
|
630 |
|
|
a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
|
631 |
|
|
}
|
632 |
|
|
|
633 |
|
|
/* Return true when block A and B can be merged. */
|
634 |
|
|
static bool
|
635 |
|
|
rtl_can_merge_blocks (basic_block a,basic_block b)
|
636 |
|
|
{
|
637 |
|
|
/* If we are partitioning hot/cold basic blocks, we don't want to
|
638 |
|
|
mess up unconditional or indirect jumps that cross between hot
|
639 |
|
|
and cold sections.
|
640 |
|
|
|
641 |
|
|
Basic block partitioning may result in some jumps that appear to
|
642 |
|
|
be optimizable (or blocks that appear to be mergeable), but which really
|
643 |
|
|
must be left untouched (they are required to make it safely across
|
644 |
|
|
partition boundaries). See the comments at the top of
|
645 |
|
|
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
|
646 |
|
|
|
647 |
|
|
if (BB_PARTITION (a) != BB_PARTITION (b))
|
648 |
|
|
return false;
|
649 |
|
|
|
650 |
|
|
/* There must be exactly one edge in between the blocks. */
|
651 |
|
|
return (single_succ_p (a)
|
652 |
|
|
&& single_succ (a) == b
|
653 |
|
|
&& single_pred_p (b)
|
654 |
|
|
&& a != b
|
655 |
|
|
/* Must be simple edge. */
|
656 |
|
|
&& !(single_succ_edge (a)->flags & EDGE_COMPLEX)
|
657 |
|
|
&& a->next_bb == b
|
658 |
|
|
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
|
659 |
|
|
/* If the jump insn has side effects,
|
660 |
|
|
we can't kill the edge. */
|
661 |
|
|
&& (!JUMP_P (BB_END (a))
|
662 |
|
|
|| (reload_completed
|
663 |
|
|
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
|
664 |
|
|
}
|
665 |
|
|
|
666 |
|
|
/* Return the label in the head of basic block BLOCK. Create one if it doesn't
|
667 |
|
|
exist. */
|
668 |
|
|
|
669 |
|
|
rtx
|
670 |
|
|
block_label (basic_block block)
|
671 |
|
|
{
|
672 |
|
|
if (block == EXIT_BLOCK_PTR)
|
673 |
|
|
return NULL_RTX;
|
674 |
|
|
|
675 |
|
|
if (!LABEL_P (BB_HEAD (block)))
|
676 |
|
|
{
|
677 |
|
|
BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
|
678 |
|
|
}
|
679 |
|
|
|
680 |
|
|
return BB_HEAD (block);
|
681 |
|
|
}
|
682 |
|
|
|
683 |
|
|
/* Attempt to perform edge redirection by replacing possibly complex jump
|
684 |
|
|
instruction by unconditional jump or removing jump completely. This can
|
685 |
|
|
apply only if all edges now point to the same block. The parameters and
|
686 |
|
|
return values are equivalent to redirect_edge_and_branch. */
|
687 |
|
|
|
688 |
|
|
edge
|
689 |
|
|
try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
|
690 |
|
|
{
|
691 |
|
|
basic_block src = e->src;
|
692 |
|
|
rtx insn = BB_END (src), kill_from;
|
693 |
|
|
rtx set;
|
694 |
|
|
int fallthru = 0;
|
695 |
|
|
|
696 |
|
|
/* If we are partitioning hot/cold basic blocks, we don't want to
|
697 |
|
|
mess up unconditional or indirect jumps that cross between hot
|
698 |
|
|
and cold sections.
|
699 |
|
|
|
700 |
|
|
Basic block partitioning may result in some jumps that appear to
|
701 |
|
|
be optimizable (or blocks that appear to be mergeable), but which really
|
702 |
|
|
must be left untouched (they are required to make it safely across
|
703 |
|
|
partition boundaries). See the comments at the top of
|
704 |
|
|
bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
|
705 |
|
|
|
706 |
|
|
if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
|
707 |
|
|
|| BB_PARTITION (src) != BB_PARTITION (target))
|
708 |
|
|
return NULL;
|
709 |
|
|
|
710 |
|
|
/* We can replace or remove a complex jump only when we have exactly
|
711 |
|
|
two edges. Also, if we have exactly one outgoing edge, we can
|
712 |
|
|
redirect that. */
|
713 |
|
|
if (EDGE_COUNT (src->succs) >= 3
|
714 |
|
|
/* Verify that all targets will be TARGET. Specifically, the
|
715 |
|
|
edge that is not E must also go to TARGET. */
|
716 |
|
|
|| (EDGE_COUNT (src->succs) == 2
|
717 |
|
|
&& EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
|
718 |
|
|
return NULL;
|
719 |
|
|
|
720 |
|
|
if (!onlyjump_p (insn))
|
721 |
|
|
return NULL;
|
722 |
|
|
if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
|
723 |
|
|
return NULL;
|
724 |
|
|
|
725 |
|
|
/* Avoid removing branch with side effects. */
|
726 |
|
|
set = single_set (insn);
|
727 |
|
|
if (!set || side_effects_p (set))
|
728 |
|
|
return NULL;
|
729 |
|
|
|
730 |
|
|
/* In case we zap a conditional jump, we'll need to kill
|
731 |
|
|
the cc0 setter too. */
|
732 |
|
|
kill_from = insn;
|
733 |
|
|
#ifdef HAVE_cc0
|
734 |
|
|
if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
|
735 |
|
|
kill_from = PREV_INSN (insn);
|
736 |
|
|
#endif
|
737 |
|
|
|
738 |
|
|
/* See if we can create the fallthru edge. */
|
739 |
|
|
if (in_cfglayout || can_fallthru (src, target))
|
740 |
|
|
{
|
741 |
|
|
if (dump_file)
|
742 |
|
|
fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
|
743 |
|
|
fallthru = 1;
|
744 |
|
|
|
745 |
|
|
/* Selectively unlink whole insn chain. */
|
746 |
|
|
if (in_cfglayout)
|
747 |
|
|
{
|
748 |
|
|
rtx insn = src->il.rtl->footer;
|
749 |
|
|
|
750 |
|
|
delete_insn_chain (kill_from, BB_END (src));
|
751 |
|
|
|
752 |
|
|
/* Remove barriers but keep jumptables. */
|
753 |
|
|
while (insn)
|
754 |
|
|
{
|
755 |
|
|
if (BARRIER_P (insn))
|
756 |
|
|
{
|
757 |
|
|
if (PREV_INSN (insn))
|
758 |
|
|
NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
|
759 |
|
|
else
|
760 |
|
|
src->il.rtl->footer = NEXT_INSN (insn);
|
761 |
|
|
if (NEXT_INSN (insn))
|
762 |
|
|
PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
|
763 |
|
|
}
|
764 |
|
|
if (LABEL_P (insn))
|
765 |
|
|
break;
|
766 |
|
|
insn = NEXT_INSN (insn);
|
767 |
|
|
}
|
768 |
|
|
}
|
769 |
|
|
else
|
770 |
|
|
delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
|
771 |
|
|
}
|
772 |
|
|
|
773 |
|
|
/* If this already is simplejump, redirect it. */
|
774 |
|
|
else if (simplejump_p (insn))
|
775 |
|
|
{
|
776 |
|
|
if (e->dest == target)
|
777 |
|
|
return NULL;
|
778 |
|
|
if (dump_file)
|
779 |
|
|
fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
|
780 |
|
|
INSN_UID (insn), e->dest->index, target->index);
|
781 |
|
|
if (!redirect_jump (insn, block_label (target), 0))
|
782 |
|
|
{
|
783 |
|
|
gcc_assert (target == EXIT_BLOCK_PTR);
|
784 |
|
|
return NULL;
|
785 |
|
|
}
|
786 |
|
|
}
|
787 |
|
|
|
788 |
|
|
/* Cannot do anything for target exit block. */
|
789 |
|
|
else if (target == EXIT_BLOCK_PTR)
|
790 |
|
|
return NULL;
|
791 |
|
|
|
792 |
|
|
/* Or replace possibly complicated jump insn by simple jump insn. */
|
793 |
|
|
else
|
794 |
|
|
{
|
795 |
|
|
rtx target_label = block_label (target);
|
796 |
|
|
rtx barrier, label, table;
|
797 |
|
|
|
798 |
|
|
emit_jump_insn_after_noloc (gen_jump (target_label), insn);
|
799 |
|
|
JUMP_LABEL (BB_END (src)) = target_label;
|
800 |
|
|
LABEL_NUSES (target_label)++;
|
801 |
|
|
if (dump_file)
|
802 |
|
|
fprintf (dump_file, "Replacing insn %i by jump %i\n",
|
803 |
|
|
INSN_UID (insn), INSN_UID (BB_END (src)));
|
804 |
|
|
|
805 |
|
|
|
806 |
|
|
delete_insn_chain (kill_from, insn);
|
807 |
|
|
|
808 |
|
|
/* Recognize a tablejump that we are converting to a
|
809 |
|
|
simple jump and remove its associated CODE_LABEL
|
810 |
|
|
and ADDR_VEC or ADDR_DIFF_VEC. */
|
811 |
|
|
if (tablejump_p (insn, &label, &table))
|
812 |
|
|
delete_insn_chain (label, table);
|
813 |
|
|
|
814 |
|
|
barrier = next_nonnote_insn (BB_END (src));
|
815 |
|
|
if (!barrier || !BARRIER_P (barrier))
|
816 |
|
|
emit_barrier_after (BB_END (src));
|
817 |
|
|
else
|
818 |
|
|
{
|
819 |
|
|
if (barrier != NEXT_INSN (BB_END (src)))
|
820 |
|
|
{
|
821 |
|
|
/* Move the jump before barrier so that the notes
|
822 |
|
|
which originally were or were created before jump table are
|
823 |
|
|
inside the basic block. */
|
824 |
|
|
rtx new_insn = BB_END (src);
|
825 |
|
|
rtx tmp;
|
826 |
|
|
|
827 |
|
|
for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
|
828 |
|
|
tmp = NEXT_INSN (tmp))
|
829 |
|
|
set_block_for_insn (tmp, src);
|
830 |
|
|
|
831 |
|
|
NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
|
832 |
|
|
PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
|
833 |
|
|
|
834 |
|
|
NEXT_INSN (new_insn) = barrier;
|
835 |
|
|
NEXT_INSN (PREV_INSN (barrier)) = new_insn;
|
836 |
|
|
|
837 |
|
|
PREV_INSN (new_insn) = PREV_INSN (barrier);
|
838 |
|
|
PREV_INSN (barrier) = new_insn;
|
839 |
|
|
}
|
840 |
|
|
}
|
841 |
|
|
}
|
842 |
|
|
|
843 |
|
|
/* Keep only one edge out and set proper flags. */
|
844 |
|
|
if (!single_succ_p (src))
|
845 |
|
|
remove_edge (e);
|
846 |
|
|
gcc_assert (single_succ_p (src));
|
847 |
|
|
|
848 |
|
|
e = single_succ_edge (src);
|
849 |
|
|
if (fallthru)
|
850 |
|
|
e->flags = EDGE_FALLTHRU;
|
851 |
|
|
else
|
852 |
|
|
e->flags = 0;
|
853 |
|
|
|
854 |
|
|
e->probability = REG_BR_PROB_BASE;
|
855 |
|
|
e->count = src->count;
|
856 |
|
|
|
857 |
|
|
/* We don't want a block to end on a line-number note since that has
|
858 |
|
|
the potential of changing the code between -g and not -g. */
|
859 |
|
|
while (NOTE_P (BB_END (e->src))
|
860 |
|
|
&& NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
|
861 |
|
|
delete_insn (BB_END (e->src));
|
862 |
|
|
|
863 |
|
|
if (e->dest != target)
|
864 |
|
|
redirect_edge_succ (e, target);
|
865 |
|
|
|
866 |
|
|
return e;
|
867 |
|
|
}
|
868 |
|
|
|
869 |
|
|
/* Redirect edge representing branch of (un)conditional jump or tablejump,
|
870 |
|
|
NULL on failure */
|
871 |
|
|
static edge
|
872 |
|
|
redirect_branch_edge (edge e, basic_block target)
|
873 |
|
|
{
|
874 |
|
|
rtx tmp;
|
875 |
|
|
rtx old_label = BB_HEAD (e->dest);
|
876 |
|
|
basic_block src = e->src;
|
877 |
|
|
rtx insn = BB_END (src);
|
878 |
|
|
|
879 |
|
|
/* We can only redirect non-fallthru edges of jump insn. */
|
880 |
|
|
if (e->flags & EDGE_FALLTHRU)
|
881 |
|
|
return NULL;
|
882 |
|
|
else if (!JUMP_P (insn))
|
883 |
|
|
return NULL;
|
884 |
|
|
|
885 |
|
|
/* Recognize a tablejump and adjust all matching cases. */
|
886 |
|
|
if (tablejump_p (insn, NULL, &tmp))
|
887 |
|
|
{
|
888 |
|
|
rtvec vec;
|
889 |
|
|
int j;
|
890 |
|
|
rtx new_label = block_label (target);
|
891 |
|
|
|
892 |
|
|
if (target == EXIT_BLOCK_PTR)
|
893 |
|
|
return NULL;
|
894 |
|
|
if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
|
895 |
|
|
vec = XVEC (PATTERN (tmp), 0);
|
896 |
|
|
else
|
897 |
|
|
vec = XVEC (PATTERN (tmp), 1);
|
898 |
|
|
|
899 |
|
|
for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
|
900 |
|
|
if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
|
901 |
|
|
{
|
902 |
|
|
RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
|
903 |
|
|
--LABEL_NUSES (old_label);
|
904 |
|
|
++LABEL_NUSES (new_label);
|
905 |
|
|
}
|
906 |
|
|
|
907 |
|
|
/* Handle casesi dispatch insns. */
|
908 |
|
|
if ((tmp = single_set (insn)) != NULL
|
909 |
|
|
&& SET_DEST (tmp) == pc_rtx
|
910 |
|
|
&& GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
|
911 |
|
|
&& GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
|
912 |
|
|
&& XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
|
913 |
|
|
{
|
914 |
|
|
XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
|
915 |
|
|
new_label);
|
916 |
|
|
--LABEL_NUSES (old_label);
|
917 |
|
|
++LABEL_NUSES (new_label);
|
918 |
|
|
}
|
919 |
|
|
}
|
920 |
|
|
else
|
921 |
|
|
{
|
922 |
|
|
/* ?? We may play the games with moving the named labels from
|
923 |
|
|
one basic block to the other in case only one computed_jump is
|
924 |
|
|
available. */
|
925 |
|
|
if (computed_jump_p (insn)
|
926 |
|
|
/* A return instruction can't be redirected. */
|
927 |
|
|
|| returnjump_p (insn))
|
928 |
|
|
return NULL;
|
929 |
|
|
|
930 |
|
|
/* If the insn doesn't go where we think, we're confused. */
|
931 |
|
|
gcc_assert (JUMP_LABEL (insn) == old_label);
|
932 |
|
|
|
933 |
|
|
/* If the substitution doesn't succeed, die. This can happen
|
934 |
|
|
if the back end emitted unrecognizable instructions or if
|
935 |
|
|
target is exit block on some arches. */
|
936 |
|
|
if (!redirect_jump (insn, block_label (target), 0))
|
937 |
|
|
{
|
938 |
|
|
gcc_assert (target == EXIT_BLOCK_PTR);
|
939 |
|
|
return NULL;
|
940 |
|
|
}
|
941 |
|
|
}
|
942 |
|
|
|
943 |
|
|
if (dump_file)
|
944 |
|
|
fprintf (dump_file, "Edge %i->%i redirected to %i\n",
|
945 |
|
|
e->src->index, e->dest->index, target->index);
|
946 |
|
|
|
947 |
|
|
if (e->dest != target)
|
948 |
|
|
e = redirect_edge_succ_nodup (e, target);
|
949 |
|
|
return e;
|
950 |
|
|
}
|
951 |
|
|
|
952 |
|
|
/* Attempt to change code to redirect edge E to TARGET. Don't do that on
|
953 |
|
|
expense of adding new instructions or reordering basic blocks.
|
954 |
|
|
|
955 |
|
|
Function can be also called with edge destination equivalent to the TARGET.
|
956 |
|
|
Then it should try the simplifications and do nothing if none is possible.
|
957 |
|
|
|
958 |
|
|
Return edge representing the branch if transformation succeeded. Return NULL
|
959 |
|
|
on failure.
|
960 |
|
|
We still return NULL in case E already destinated TARGET and we didn't
|
961 |
|
|
managed to simplify instruction stream. */
|
962 |
|
|
|
963 |
|
|
static edge
|
964 |
|
|
rtl_redirect_edge_and_branch (edge e, basic_block target)
|
965 |
|
|
{
|
966 |
|
|
edge ret;
|
967 |
|
|
basic_block src = e->src;
|
968 |
|
|
|
969 |
|
|
if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
|
970 |
|
|
return NULL;
|
971 |
|
|
|
972 |
|
|
if (e->dest == target)
|
973 |
|
|
return e;
|
974 |
|
|
|
975 |
|
|
if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
|
976 |
|
|
{
|
977 |
|
|
src->flags |= BB_DIRTY;
|
978 |
|
|
return ret;
|
979 |
|
|
}
|
980 |
|
|
|
981 |
|
|
ret = redirect_branch_edge (e, target);
|
982 |
|
|
if (!ret)
|
983 |
|
|
return NULL;
|
984 |
|
|
|
985 |
|
|
src->flags |= BB_DIRTY;
|
986 |
|
|
return ret;
|
987 |
|
|
}
|
988 |
|
|
|
989 |
|
|
/* Like force_nonfallthru below, but additionally performs redirection
|
990 |
|
|
Used by redirect_edge_and_branch_force. */
|
991 |
|
|
|
992 |
|
|
static basic_block
|
993 |
|
|
force_nonfallthru_and_redirect (edge e, basic_block target)
|
994 |
|
|
{
|
995 |
|
|
basic_block jump_block, new_bb = NULL, src = e->src;
|
996 |
|
|
rtx note;
|
997 |
|
|
edge new_edge;
|
998 |
|
|
int abnormal_edge_flags = 0;
|
999 |
|
|
|
1000 |
|
|
/* In the case the last instruction is conditional jump to the next
|
1001 |
|
|
instruction, first redirect the jump itself and then continue
|
1002 |
|
|
by creating a basic block afterwards to redirect fallthru edge. */
|
1003 |
|
|
if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
|
1004 |
|
|
&& any_condjump_p (BB_END (e->src))
|
1005 |
|
|
&& JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
|
1006 |
|
|
{
|
1007 |
|
|
rtx note;
|
1008 |
|
|
edge b = unchecked_make_edge (e->src, target, 0);
|
1009 |
|
|
bool redirected;
|
1010 |
|
|
|
1011 |
|
|
redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
|
1012 |
|
|
gcc_assert (redirected);
|
1013 |
|
|
|
1014 |
|
|
note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
|
1015 |
|
|
if (note)
|
1016 |
|
|
{
|
1017 |
|
|
int prob = INTVAL (XEXP (note, 0));
|
1018 |
|
|
|
1019 |
|
|
b->probability = prob;
|
1020 |
|
|
b->count = e->count * prob / REG_BR_PROB_BASE;
|
1021 |
|
|
e->probability -= e->probability;
|
1022 |
|
|
e->count -= b->count;
|
1023 |
|
|
if (e->probability < 0)
|
1024 |
|
|
e->probability = 0;
|
1025 |
|
|
if (e->count < 0)
|
1026 |
|
|
e->count = 0;
|
1027 |
|
|
}
|
1028 |
|
|
}
|
1029 |
|
|
|
1030 |
|
|
if (e->flags & EDGE_ABNORMAL)
|
1031 |
|
|
{
|
1032 |
|
|
/* Irritating special case - fallthru edge to the same block as abnormal
|
1033 |
|
|
edge.
|
1034 |
|
|
We can't redirect abnormal edge, but we still can split the fallthru
|
1035 |
|
|
one and create separate abnormal edge to original destination.
|
1036 |
|
|
This allows bb-reorder to make such edge non-fallthru. */
|
1037 |
|
|
gcc_assert (e->dest == target);
|
1038 |
|
|
abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
|
1039 |
|
|
e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
|
1040 |
|
|
}
|
1041 |
|
|
else
|
1042 |
|
|
{
|
1043 |
|
|
gcc_assert (e->flags & EDGE_FALLTHRU);
|
1044 |
|
|
if (e->src == ENTRY_BLOCK_PTR)
|
1045 |
|
|
{
|
1046 |
|
|
/* We can't redirect the entry block. Create an empty block
|
1047 |
|
|
at the start of the function which we use to add the new
|
1048 |
|
|
jump. */
|
1049 |
|
|
edge tmp;
|
1050 |
|
|
edge_iterator ei;
|
1051 |
|
|
bool found = false;
|
1052 |
|
|
|
1053 |
|
|
basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
|
1054 |
|
|
|
1055 |
|
|
/* Change the existing edge's source to be the new block, and add
|
1056 |
|
|
a new edge from the entry block to the new block. */
|
1057 |
|
|
e->src = bb;
|
1058 |
|
|
for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
|
1059 |
|
|
{
|
1060 |
|
|
if (tmp == e)
|
1061 |
|
|
{
|
1062 |
|
|
VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
|
1063 |
|
|
found = true;
|
1064 |
|
|
break;
|
1065 |
|
|
}
|
1066 |
|
|
else
|
1067 |
|
|
ei_next (&ei);
|
1068 |
|
|
}
|
1069 |
|
|
|
1070 |
|
|
gcc_assert (found);
|
1071 |
|
|
|
1072 |
|
|
VEC_safe_push (edge, gc, bb->succs, e);
|
1073 |
|
|
make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
|
1074 |
|
|
}
|
1075 |
|
|
}
|
1076 |
|
|
|
1077 |
|
|
if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
|
1078 |
|
|
{
|
1079 |
|
|
/* Create the new structures. */
|
1080 |
|
|
|
1081 |
|
|
/* If the old block ended with a tablejump, skip its table
|
1082 |
|
|
by searching forward from there. Otherwise start searching
|
1083 |
|
|
forward from the last instruction of the old block. */
|
1084 |
|
|
if (!tablejump_p (BB_END (e->src), NULL, ¬e))
|
1085 |
|
|
note = BB_END (e->src);
|
1086 |
|
|
note = NEXT_INSN (note);
|
1087 |
|
|
|
1088 |
|
|
jump_block = create_basic_block (note, NULL, e->src);
|
1089 |
|
|
jump_block->count = e->count;
|
1090 |
|
|
jump_block->frequency = EDGE_FREQUENCY (e);
|
1091 |
|
|
jump_block->loop_depth = target->loop_depth;
|
1092 |
|
|
|
1093 |
|
|
if (target->il.rtl->global_live_at_start)
|
1094 |
|
|
{
|
1095 |
|
|
jump_block->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
|
1096 |
|
|
jump_block->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
|
1097 |
|
|
COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
|
1098 |
|
|
target->il.rtl->global_live_at_start);
|
1099 |
|
|
COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
|
1100 |
|
|
target->il.rtl->global_live_at_start);
|
1101 |
|
|
}
|
1102 |
|
|
|
1103 |
|
|
/* Make sure new block ends up in correct hot/cold section. */
|
1104 |
|
|
|
1105 |
|
|
BB_COPY_PARTITION (jump_block, e->src);
|
1106 |
|
|
if (flag_reorder_blocks_and_partition
|
1107 |
|
|
&& targetm.have_named_sections
|
1108 |
|
|
&& JUMP_P (BB_END (jump_block))
|
1109 |
|
|
&& !any_condjump_p (BB_END (jump_block))
|
1110 |
|
|
&& (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
|
1111 |
|
|
REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
|
1112 |
|
|
NULL_RTX,
|
1113 |
|
|
REG_NOTES
|
1114 |
|
|
(BB_END
|
1115 |
|
|
(jump_block)));
|
1116 |
|
|
|
1117 |
|
|
/* Wire edge in. */
|
1118 |
|
|
new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
|
1119 |
|
|
new_edge->probability = e->probability;
|
1120 |
|
|
new_edge->count = e->count;
|
1121 |
|
|
|
1122 |
|
|
/* Redirect old edge. */
|
1123 |
|
|
redirect_edge_pred (e, jump_block);
|
1124 |
|
|
e->probability = REG_BR_PROB_BASE;
|
1125 |
|
|
|
1126 |
|
|
new_bb = jump_block;
|
1127 |
|
|
}
|
1128 |
|
|
else
|
1129 |
|
|
jump_block = e->src;
|
1130 |
|
|
|
1131 |
|
|
e->flags &= ~EDGE_FALLTHRU;
|
1132 |
|
|
if (target == EXIT_BLOCK_PTR)
|
1133 |
|
|
{
|
1134 |
|
|
#ifdef HAVE_return
|
1135 |
|
|
emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
|
1136 |
|
|
#else
|
1137 |
|
|
gcc_unreachable ();
|
1138 |
|
|
#endif
|
1139 |
|
|
}
|
1140 |
|
|
else
|
1141 |
|
|
{
|
1142 |
|
|
rtx label = block_label (target);
|
1143 |
|
|
emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
|
1144 |
|
|
JUMP_LABEL (BB_END (jump_block)) = label;
|
1145 |
|
|
LABEL_NUSES (label)++;
|
1146 |
|
|
}
|
1147 |
|
|
|
1148 |
|
|
emit_barrier_after (BB_END (jump_block));
|
1149 |
|
|
redirect_edge_succ_nodup (e, target);
|
1150 |
|
|
|
1151 |
|
|
if (abnormal_edge_flags)
|
1152 |
|
|
make_edge (src, target, abnormal_edge_flags);
|
1153 |
|
|
|
1154 |
|
|
return new_bb;
|
1155 |
|
|
}
|
1156 |
|
|
|
1157 |
|
|
/* Edge E is assumed to be fallthru edge. Emit needed jump instruction
|
1158 |
|
|
(and possibly create new basic block) to make edge non-fallthru.
|
1159 |
|
|
Return newly created BB or NULL if none. */
|
1160 |
|
|
|
1161 |
|
|
basic_block
|
1162 |
|
|
force_nonfallthru (edge e)
|
1163 |
|
|
{
|
1164 |
|
|
return force_nonfallthru_and_redirect (e, e->dest);
|
1165 |
|
|
}
|
1166 |
|
|
|
1167 |
|
|
/* Redirect edge even at the expense of creating new jump insn or
|
1168 |
|
|
basic block. Return new basic block if created, NULL otherwise.
|
1169 |
|
|
Conversion must be possible. */
|
1170 |
|
|
|
1171 |
|
|
static basic_block
|
1172 |
|
|
rtl_redirect_edge_and_branch_force (edge e, basic_block target)
|
1173 |
|
|
{
|
1174 |
|
|
if (redirect_edge_and_branch (e, target)
|
1175 |
|
|
|| e->dest == target)
|
1176 |
|
|
return NULL;
|
1177 |
|
|
|
1178 |
|
|
/* In case the edge redirection failed, try to force it to be non-fallthru
|
1179 |
|
|
and redirect newly created simplejump. */
|
1180 |
|
|
e->src->flags |= BB_DIRTY;
|
1181 |
|
|
return force_nonfallthru_and_redirect (e, target);
|
1182 |
|
|
}
|
1183 |
|
|
|
1184 |
|
|
/* The given edge should potentially be a fallthru edge. If that is in
|
1185 |
|
|
fact true, delete the jump and barriers that are in the way. */
|
1186 |
|
|
|
1187 |
|
|
static void
|
1188 |
|
|
rtl_tidy_fallthru_edge (edge e)
|
1189 |
|
|
{
|
1190 |
|
|
rtx q;
|
1191 |
|
|
basic_block b = e->src, c = b->next_bb;
|
1192 |
|
|
|
1193 |
|
|
/* ??? In a late-running flow pass, other folks may have deleted basic
|
1194 |
|
|
blocks by nopping out blocks, leaving multiple BARRIERs between here
|
1195 |
|
|
and the target label. They ought to be chastised and fixed.
|
1196 |
|
|
|
1197 |
|
|
We can also wind up with a sequence of undeletable labels between
|
1198 |
|
|
one block and the next.
|
1199 |
|
|
|
1200 |
|
|
So search through a sequence of barriers, labels, and notes for
|
1201 |
|
|
the head of block C and assert that we really do fall through. */
|
1202 |
|
|
|
1203 |
|
|
for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
|
1204 |
|
|
if (INSN_P (q))
|
1205 |
|
|
return;
|
1206 |
|
|
|
1207 |
|
|
/* Remove what will soon cease being the jump insn from the source block.
|
1208 |
|
|
If block B consisted only of this single jump, turn it into a deleted
|
1209 |
|
|
note. */
|
1210 |
|
|
q = BB_END (b);
|
1211 |
|
|
if (JUMP_P (q)
|
1212 |
|
|
&& onlyjump_p (q)
|
1213 |
|
|
&& (any_uncondjump_p (q)
|
1214 |
|
|
|| single_succ_p (b)))
|
1215 |
|
|
{
|
1216 |
|
|
#ifdef HAVE_cc0
|
1217 |
|
|
/* If this was a conditional jump, we need to also delete
|
1218 |
|
|
the insn that set cc0. */
|
1219 |
|
|
if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
|
1220 |
|
|
q = PREV_INSN (q);
|
1221 |
|
|
#endif
|
1222 |
|
|
|
1223 |
|
|
q = PREV_INSN (q);
|
1224 |
|
|
|
1225 |
|
|
/* We don't want a block to end on a line-number note since that has
|
1226 |
|
|
the potential of changing the code between -g and not -g. */
|
1227 |
|
|
while (NOTE_P (q) && NOTE_LINE_NUMBER (q) >= 0)
|
1228 |
|
|
q = PREV_INSN (q);
|
1229 |
|
|
}
|
1230 |
|
|
|
1231 |
|
|
/* Selectively unlink the sequence. */
|
1232 |
|
|
if (q != PREV_INSN (BB_HEAD (c)))
|
1233 |
|
|
delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
|
1234 |
|
|
|
1235 |
|
|
e->flags |= EDGE_FALLTHRU;
|
1236 |
|
|
}
|
1237 |
|
|
|
1238 |
|
|
/* Should move basic block BB after basic block AFTER. NIY. */
|
1239 |
|
|
|
1240 |
|
|
static bool
|
1241 |
|
|
rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
|
1242 |
|
|
basic_block after ATTRIBUTE_UNUSED)
|
1243 |
|
|
{
|
1244 |
|
|
return false;
|
1245 |
|
|
}
|
1246 |
|
|
|
1247 |
|
|
/* Split a (typically critical) edge. Return the new block.
|
1248 |
|
|
The edge must not be abnormal.
|
1249 |
|
|
|
1250 |
|
|
??? The code generally expects to be called on critical edges.
|
1251 |
|
|
The case of a block ending in an unconditional jump to a
|
1252 |
|
|
block with multiple predecessors is not handled optimally. */
|
1253 |
|
|
|
1254 |
|
|
static basic_block
|
1255 |
|
|
rtl_split_edge (edge edge_in)
|
1256 |
|
|
{
|
1257 |
|
|
basic_block bb;
|
1258 |
|
|
rtx before;
|
1259 |
|
|
|
1260 |
|
|
/* Abnormal edges cannot be split. */
|
1261 |
|
|
gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
|
1262 |
|
|
|
1263 |
|
|
/* We are going to place the new block in front of edge destination.
|
1264 |
|
|
Avoid existence of fallthru predecessors. */
|
1265 |
|
|
if ((edge_in->flags & EDGE_FALLTHRU) == 0)
|
1266 |
|
|
{
|
1267 |
|
|
edge e;
|
1268 |
|
|
edge_iterator ei;
|
1269 |
|
|
|
1270 |
|
|
FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
|
1271 |
|
|
if (e->flags & EDGE_FALLTHRU)
|
1272 |
|
|
break;
|
1273 |
|
|
|
1274 |
|
|
if (e)
|
1275 |
|
|
force_nonfallthru (e);
|
1276 |
|
|
}
|
1277 |
|
|
|
1278 |
|
|
/* Create the basic block note. */
|
1279 |
|
|
if (edge_in->dest != EXIT_BLOCK_PTR)
|
1280 |
|
|
before = BB_HEAD (edge_in->dest);
|
1281 |
|
|
else
|
1282 |
|
|
before = NULL_RTX;
|
1283 |
|
|
|
1284 |
|
|
/* If this is a fall through edge to the exit block, the blocks might be
|
1285 |
|
|
not adjacent, and the right place is the after the source. */
|
1286 |
|
|
if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
|
1287 |
|
|
{
|
1288 |
|
|
before = NEXT_INSN (BB_END (edge_in->src));
|
1289 |
|
|
bb = create_basic_block (before, NULL, edge_in->src);
|
1290 |
|
|
BB_COPY_PARTITION (bb, edge_in->src);
|
1291 |
|
|
}
|
1292 |
|
|
else
|
1293 |
|
|
{
|
1294 |
|
|
bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
|
1295 |
|
|
/* ??? Why not edge_in->dest->prev_bb here? */
|
1296 |
|
|
BB_COPY_PARTITION (bb, edge_in->dest);
|
1297 |
|
|
}
|
1298 |
|
|
|
1299 |
|
|
/* ??? This info is likely going to be out of date very soon. */
|
1300 |
|
|
if (edge_in->dest->il.rtl->global_live_at_start)
|
1301 |
|
|
{
|
1302 |
|
|
bb->il.rtl->global_live_at_start = ALLOC_REG_SET (®_obstack);
|
1303 |
|
|
bb->il.rtl->global_live_at_end = ALLOC_REG_SET (®_obstack);
|
1304 |
|
|
COPY_REG_SET (bb->il.rtl->global_live_at_start,
|
1305 |
|
|
edge_in->dest->il.rtl->global_live_at_start);
|
1306 |
|
|
COPY_REG_SET (bb->il.rtl->global_live_at_end,
|
1307 |
|
|
edge_in->dest->il.rtl->global_live_at_start);
|
1308 |
|
|
}
|
1309 |
|
|
|
1310 |
|
|
make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
|
1311 |
|
|
|
1312 |
|
|
/* For non-fallthru edges, we must adjust the predecessor's
|
1313 |
|
|
jump instruction to target our new block. */
|
1314 |
|
|
if ((edge_in->flags & EDGE_FALLTHRU) == 0)
|
1315 |
|
|
{
|
1316 |
|
|
edge redirected = redirect_edge_and_branch (edge_in, bb);
|
1317 |
|
|
gcc_assert (redirected);
|
1318 |
|
|
}
|
1319 |
|
|
else
|
1320 |
|
|
redirect_edge_succ (edge_in, bb);
|
1321 |
|
|
|
1322 |
|
|
return bb;
|
1323 |
|
|
}
|
1324 |
|
|
|
1325 |
|
|
/* Queue instructions for insertion on an edge between two basic blocks.
|
1326 |
|
|
The new instructions and basic blocks (if any) will not appear in the
|
1327 |
|
|
CFG until commit_edge_insertions is called. */
|
1328 |
|
|
|
1329 |
|
|
void
|
1330 |
|
|
insert_insn_on_edge (rtx pattern, edge e)
|
1331 |
|
|
{
|
1332 |
|
|
/* We cannot insert instructions on an abnormal critical edge.
|
1333 |
|
|
It will be easier to find the culprit if we die now. */
|
1334 |
|
|
gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
|
1335 |
|
|
|
1336 |
|
|
if (e->insns.r == NULL_RTX)
|
1337 |
|
|
start_sequence ();
|
1338 |
|
|
else
|
1339 |
|
|
push_to_sequence (e->insns.r);
|
1340 |
|
|
|
1341 |
|
|
emit_insn (pattern);
|
1342 |
|
|
|
1343 |
|
|
e->insns.r = get_insns ();
|
1344 |
|
|
end_sequence ();
|
1345 |
|
|
}
|
1346 |
|
|
|
1347 |
|
|
/* Update the CFG for the instructions queued on edge E. */
|
1348 |
|
|
|
1349 |
|
|
static void
|
1350 |
|
|
commit_one_edge_insertion (edge e, int watch_calls)
|
1351 |
|
|
{
|
1352 |
|
|
rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
|
1353 |
|
|
basic_block bb = NULL;
|
1354 |
|
|
|
1355 |
|
|
/* Pull the insns off the edge now since the edge might go away. */
|
1356 |
|
|
insns = e->insns.r;
|
1357 |
|
|
e->insns.r = NULL_RTX;
|
1358 |
|
|
|
1359 |
|
|
/* Special case -- avoid inserting code between call and storing
|
1360 |
|
|
its return value. */
|
1361 |
|
|
if (watch_calls && (e->flags & EDGE_FALLTHRU)
|
1362 |
|
|
&& single_pred_p (e->dest)
|
1363 |
|
|
&& e->src != ENTRY_BLOCK_PTR
|
1364 |
|
|
&& CALL_P (BB_END (e->src)))
|
1365 |
|
|
{
|
1366 |
|
|
rtx next = next_nonnote_insn (BB_END (e->src));
|
1367 |
|
|
|
1368 |
|
|
after = BB_HEAD (e->dest);
|
1369 |
|
|
/* The first insn after the call may be a stack pop, skip it. */
|
1370 |
|
|
while (next
|
1371 |
|
|
&& keep_with_call_p (next))
|
1372 |
|
|
{
|
1373 |
|
|
after = next;
|
1374 |
|
|
next = next_nonnote_insn (next);
|
1375 |
|
|
}
|
1376 |
|
|
bb = e->dest;
|
1377 |
|
|
}
|
1378 |
|
|
if (!before && !after)
|
1379 |
|
|
{
|
1380 |
|
|
/* Figure out where to put these things. If the destination has
|
1381 |
|
|
one predecessor, insert there. Except for the exit block. */
|
1382 |
|
|
if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
|
1383 |
|
|
{
|
1384 |
|
|
bb = e->dest;
|
1385 |
|
|
|
1386 |
|
|
/* Get the location correct wrt a code label, and "nice" wrt
|
1387 |
|
|
a basic block note, and before everything else. */
|
1388 |
|
|
tmp = BB_HEAD (bb);
|
1389 |
|
|
if (LABEL_P (tmp))
|
1390 |
|
|
tmp = NEXT_INSN (tmp);
|
1391 |
|
|
if (NOTE_INSN_BASIC_BLOCK_P (tmp))
|
1392 |
|
|
tmp = NEXT_INSN (tmp);
|
1393 |
|
|
if (tmp == BB_HEAD (bb))
|
1394 |
|
|
before = tmp;
|
	  else if (tmp)
	    after = PREV_INSN (tmp);
	  else
	    after = get_last_insn ();
	}

      /* If the source has one successor and the edge is not abnormal,
	 insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
	       && single_succ_p (e->src)
	       && e->src != ENTRY_BLOCK_PTR)
	{
	  bb = e->src;

	  /* It is possible to have a non-simple jump here.  Consider a target
	     where some forms of unconditional jumps clobber a register.  This
	     happens on the fr30 for example.

	     We know this block has a single successor, so we can just emit
	     the queued insns before the jump.  */
	  if (JUMP_P (BB_END (bb)))
	    before = BB_END (bb);
	  else
	    {
	      /* We'd better be fallthru, or we've lost track of
		 what's what.  */
	      gcc_assert (e->flags & EDGE_FALLTHRU);

	      after = BB_END (bb);
	    }
	}
      /* Otherwise we must split the edge.  */
      else
	{
	  bb = split_edge (e);
	  after = BB_END (bb);

	  if (flag_reorder_blocks_and_partition
	      && targetm.have_named_sections
	      && e->src != ENTRY_BLOCK_PTR
	      && BB_PARTITION (e->src) == BB_COLD_PARTITION
	      && !(e->flags & EDGE_CROSSING))
	    {
	      rtx bb_note, cur_insn;

	      bb_note = NULL_RTX;
	      for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
		   cur_insn = NEXT_INSN (cur_insn))
		if (NOTE_P (cur_insn)
		    && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
		  {
		    bb_note = cur_insn;
		    break;
		  }

	      if (JUMP_P (BB_END (bb))
		  && !any_condjump_p (BB_END (bb))
		  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
		REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
		  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
	    }
	}
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
	 This is not currently a problem because this only happens
	 for the (single) epilogue, which already has a fallthru edge
	 to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
		  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
	delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  bb->aux = &bb->aux;
}

/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e, false);
	  }
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}
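
/* Illustrative sketch (not part of this file's interfaces): a pass that
   wants code to run on a particular edge normally queues the insns with
   insert_insn_on_edge and then flushes all queues at once.  DEST_REG and
   SRC_REG below are hypothetical placeholders for operands the pass
   already holds; insert_insn_on_edge, gen_move_insn and
   commit_edge_insertions are the real calls being demonstrated.

     insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);
     ...
     commit_edge_insertions ();

   The insns stay queued on E until commit_edge_insertions runs, which
   either emits them into an existing block or splits E via
   commit_one_edge_insertion above.  */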

/* Update the CFG for all queued instructions, taking special care to
   insert code on edges between a call and the store of its return
   value.  */

void
commit_edge_insertions_watch_calls (void)
{
  basic_block bb;
  sbitmap blocks;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->insns.r)
	  {
	    changed = true;
	    commit_one_edge_insertion (e, true);
	  }
    }

  if (!changed)
    return;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  FOR_EACH_BB (bb)
    if (bb->aux)
      {
	SET_BIT (blocks, bb->index);
	/* Check for forgotten bb->aux values before commit_edge_insertions
	   call.  */
	gcc_assert (bb->aux == &bb->aux);
	bb->aux = NULL;
      }
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
}

/* Print out RTL-specific basic block information (live information
   at start and end).  */

static void
rtl_dump_bb (basic_block bb, FILE *outf, int indent)
{
  rtx insn;
  rtx last;
  char *s_indent;

  s_indent = alloca ((size_t) indent + 1);
  memset (s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  fprintf (outf, ";;%s Registers live at start: ", s_indent);
  dump_regset (bb->il.rtl->global_live_at_start, outf);
  putc ('\n', outf);

  for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
       insn = NEXT_INSN (insn))
    print_rtl_single (outf, insn);

  fprintf (outf, ";;%s Registers live at end: ", s_indent);
  dump_regset (bb->il.rtl->global_live_at_end, outf);
  putc ('\n', outf);
}

/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      FOR_EACH_BB_REVERSE (bb)
	{
	  rtx x;

	  start[INSN_UID (BB_HEAD (bb))] = bb;
	  end[INSN_UID (BB_END (bb))] = bb;
	  for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
	    {
	      enum bb_state state = IN_MULTIPLE_BB;

	      if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
		state = IN_ONE_BB;
	      in_bb_p[INSN_UID (x)] = state;

	      if (x == BB_END (bb))
		break;
	    }
	}

      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
	{
	  int did_output;

	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; Start of basic block %d, registers live:",
		       bb->index);
	      dump_regset (bb->il.rtl->global_live_at_start, outf);
	      putc ('\n', outf);
	    }

	  if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
	      && !NOTE_P (tmp_rtx)
	      && !BARRIER_P (tmp_rtx))
	    fprintf (outf, ";; Insn is not within a basic block\n");
	  else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
	    fprintf (outf, ";; Insn is in multiple basic blocks\n");

	  did_output = print_rtl_single (outf, tmp_rtx);

	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
	    {
	      fprintf (outf, ";; End of basic block %d, registers live:\n",
		       bb->index);
	      dump_regset (bb->il.rtl->global_live_at_end, outf);
	      putc ('\n', outf);
	    }

	  if (did_output)
	    putc ('\n', outf);
	}

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
	   tmp_rtx = XEXP (tmp_rtx, 1))
	print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
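
/* Illustrative sketch (not part of this file's interfaces):
   print_rtl_with_bb is the convenient entry point when inspecting a
   pass's RTL by hand.  DUMP_FILE here stands for whatever FILE * the
   caller has open; the guard and stream are placeholders, the call
   itself is real.

     if (dump_file)
       print_rtl_with_bb (dump_file, get_insns ());

   This writes the whole insn chain annotated with basic block start/end
   markers and live-register sets, which is easier to correlate with the
   CFG than plain print_rtl output.  */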

/* Update the REG_BR_PROB note on the jump ending BB so that it matches
   the probability recorded on BB's branch edge.  */

void
update_br_prob_note (basic_block bb)
{
  rtx note;
  if (!JUMP_P (BB_END (bb)))
    return;
  note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
    return;
  XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
}

/* Get the last insn associated with block BB (that includes barriers and
   tablejumps after BB).  */
rtx
get_last_bb_insn (basic_block bb)
{
  rtx tmp;
  rtx end = BB_END (bb);

  /* Include any jump table following the basic block.  */
  if (tablejump_p (end, NULL, &tmp))
    end = tmp;

  /* Include any barriers that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  while (tmp && BARRIER_P (tmp))
    {
      end = tmp;
      tmp = next_nonnote_insn (end);
    }

  return end;
}
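
/* Illustrative sketch (not part of this file's interfaces):
   get_last_bb_insn is useful when a pass needs the full extent that a
   block "owns" in the insn stream, e.g. to decide where new code may be
   emitted after BB.

     rtx first = BB_HEAD (bb);
     rtx last = get_last_bb_insn (bb);

   LAST may be a BARRIER or the jump table of a tablejump rather than
   BB_END (bb) itself, so the [FIRST, LAST] range covers the trailing
   insns that have to move together with the block.  */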

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries

   In the future it can be extended to check a lot of other things as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  const int max_uid = get_max_uid ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int err = 0;
  basic_block bb;

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	if (x == end)
	  break;

      if (!(bb->flags & BB_RTL))
	{
	  error ("BB_RTL flag not set for block %d", bb->index);
	  err = 1;
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block and initialize the BB_INFO array
	     used by other passes.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      if (JUMP_P (BB_END (bb))
	  && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
	  && EDGE_COUNT (bb->succs) >= 2
	  && any_condjump_p (BB_END (bb)))
	{
	  if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
	      && profile_status != PROFILE_ABSENT)
	    {
	      error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
		     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
	      err = 1;
	    }
	}
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    {
	      n_fallthru++, fallthru = e;
	      if ((e->flags & EDGE_CROSSING)
		  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
		      && e->src != ENTRY_BLOCK_PTR
		      && e->dest != EXIT_BLOCK_PTR))
		{
		  error ("fallthru edge crosses section boundary (bb %i)",
			 e->src->index);
		  err = 1;
		}
	    }

	  if ((e->flags & ~(EDGE_DFS_BACK
			    | EDGE_CAN_FALLTHRU
			    | EDGE_IRREDUCIBLE_LOOP
			    | EDGE_LOOP_EXIT
			    | EDGE_CROSSING)) == 0)
	    n_branch++;

	  if (e->flags & EDGE_ABNORMAL_CALL)
	    n_call++;

	  if (e->flags & EDGE_EH)
	    n_eh++;
	  else if (e->flags & EDGE_ABNORMAL)
	    n_abnormal++;
	}

      if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
	  && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
	{
	  error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
	  err = 1;
	}
      if (n_branch
	  && (!JUMP_P (BB_END (bb))
	      || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
				   || any_condjump_p (BB_END (bb))))))
	{
	  error ("too many outgoing branch edges from bb %i", bb->index);
	  err = 1;
	}
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
	{
	  error ("fallthru edge after unconditional jump %i", bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
	{
	  error ("wrong amount of branch edges after unconditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
	  && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
	{
	  error ("wrong amount of branch edges after conditional jump %i",
		 bb->index);
	  err = 1;
	}
      if (n_call && !CALL_P (BB_END (bb)))
	{
	  error ("call edges for non-call insn in bb %i", bb->index);
	  err = 1;
	}
      if (n_abnormal
	  && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
	  && (!JUMP_P (BB_END (bb))
	      || any_condjump_p (BB_END (bb))
	      || any_uncondjump_p (BB_END (bb))))
	{
	  error ("abnormal edges for no purpose in bb %i", bb->index);
	  err = 1;
	}

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
	/* We may have a barrier inside a basic block before dead code
	   elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
	if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
	  {
	    debug_rtx (x);
	    if (! BLOCK_FOR_INSN (x))
	      error
		("insn %d inside basic block %d but block_for_insn is NULL",
		 INSN_UID (x), bb->index);
	    else
	      error
		("insn %d inside basic block %d but block_for_insn is %i",
		 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

	    err = 1;
	  }

      /* OK pointers are correct.  Now check the header of basic
	 block.  It ought to contain optional CODE_LABEL followed
	 by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
	{
	  if (BB_END (bb) == x)
	    {
	      error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		     bb->index);
	      err = 1;
	    }

	  x = NEXT_INSN (x);
	}

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
	{
	  error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
		 bb->index);
	  err = 1;
	}

      if (BB_END (bb) == x)
	/* Do checks for empty blocks here.  */
	;
      else
	for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
	  {
	    if (NOTE_INSN_BASIC_BLOCK_P (x))
	      {
		error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
		       INSN_UID (x), bb->index);
		err = 1;
	      }

	    if (x == BB_END (bb))
	      break;

	    if (control_flow_insn_p (x))
	      {
		error ("in basic block %d:", bb->index);
		fatal_insn ("flow control insn inside a basic block", x);
	      }
	  }
    }

  /* Clean up.  */
  free (bb_info);
  return err;
}

/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:
   - all checks of rtl_verify_flow_info_1
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edges point to adjacent blocks.  */
static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->predictions)
	{
	  error ("bb prediction set for block %i, but it is not used in RTL land",
		 bb->index);
	  err = 1;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
	       insn = NEXT_INSN (insn))
	    if (!insn
		|| (NOTE_P (insn)
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
	      {
		error ("missing barrier after block %i", bb->index);
		err = 1;
		break;
	      }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_P (NEXT_INSN (x))
		  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
		      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
	fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  return err;
}
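
/* Illustrative sketch (not part of this file's interfaces): passes that
   rewrite the insn chain by hand usually re-run the checker right
   afterwards, in the same spirit as the ENABLE_CHECKING calls above.
   fixup_my_pass is a hypothetical placeholder for whatever transformation
   was just performed.

     fixup_my_pass ();
   #ifdef ENABLE_CHECKING
     verify_flow_info ();
   #endif

   verify_flow_info dispatches through cfghooks, so in RTL land it ends up
   in rtl_verify_flow_info above (or in rtl_verify_flow_info_1 when in
   cfglayout mode) and reports the first inconsistency it finds.  */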

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding edges
   from the CFG.  Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_EH)
	{
	  if (can_throw_internal (BB_END (bb))
	      /* If this is a call edge, verify that this is a call insn.  */
	      && (! (e->flags & EDGE_ABNORMAL_CALL)
		  || CALL_P (BB_END (bb))))
	    {
	      ei_next (&ei);
	      continue;
	    }
	}
      else if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (CALL_P (BB_END (bb))
	      && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
		  || INTVAL (XEXP (note, 0)) >= 0))
	    {
	      ei_next (&ei);
	      continue;
	    }
	}
      else
	{
	  ei_next (&ei);
	  continue;
	}

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b, f;
      edge_iterator ei;

      /* We only care about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags leaking from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  bb->flags |= BB_DIRTY;
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  bb->flags |= BB_DIRTY;
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (void)
{
  int purged = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
    }

  return purged;
}
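
/* Illustrative sketch (not part of this file's interfaces): a typical
   caller runs the purge after it has simplified or deleted jumps, so the
   CFG agrees with the new insn stream again.  simplify_jumps_somehow is a
   hypothetical placeholder for the actual transformation; cleanup_cfg and
   purge_all_dead_edges are the real calls being demonstrated.

     simplify_jumps_somehow ();
     if (purge_all_dead_edges ())
       cleanup_cfg (0);

   When only a single block's final jump was touched, purge_dead_edges can
   be called for just that block instead.  */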

/* Same as split_block but update cfg_layout structures.  */

static basic_block
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = insnp;
  basic_block new_bb = rtl_split_block (bb, insn);

  new_bb->il.rtl->footer = bb->il.rtl->footer;
  bb->il.rtl->footer = NULL;

  return new_bb;
}

/* Redirect Edge to DEST.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      src->flags |= BB_DIRTY;
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
	fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
		 e->src->index, dest->index);

      e->src->flags |= BB_DIRTY;
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
	  && label_is_jump_target_p (BB_HEAD (e->dest),
				     BB_END (src)))
	{
	  edge redirected;

	  if (dump_file)
	    fprintf (dump_file, "Fallthru edge unified with branch "
		     "%i->%i redirected to %i\n",
		     e->src->index, e->dest->index, dest->index);
	  e->flags &= ~EDGE_FALLTHRU;
	  redirected = redirect_branch_edge (e, dest);
	  gcc_assert (redirected);
	  e->flags |= EDGE_FALLTHRU;
	  e->src->flags |= BB_DIRTY;
	  return e;
	}
      /* In case we are redirecting fallthru edge to the branch edge
	 of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
	{
	  /* Find the edge that is different from E.  */
	  edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

	  if (s->dest == dest
	      && any_condjump_p (BB_END (src))
	      && onlyjump_p (BB_END (src)))
	    delete_insn (BB_END (src));
	}
      ret = redirect_edge_succ_nodup (e, dest);
      if (dump_file)
	fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
		 e->src->index, e->dest->index, dest->index);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  src->flags |= BB_DIRTY;
  return ret;
}

/* Simple wrapper as we always can redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);

  gcc_assert (redirected);
  return NULL;
}

/* Same as delete_basic_block but update cfg_layout structures.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  if (bb->il.rtl->header)
    {
      next = BB_HEAD (bb);
      if (prev)
	NEXT_INSN (prev) = bb->il.rtl->header;
      else
	set_first_insn (bb->il.rtl->header);
      PREV_INSN (bb->il.rtl->header) = prev;
      insn = bb->il.rtl->header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->il.rtl->footer)
    {
      insn = bb->il.rtl->footer;
      while (insn)
	{
	  if (BARRIER_P (insn))
	    {
	      if (PREV_INSN (insn))
		NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
	      else
		bb->il.rtl->footer = NEXT_INSN (insn);
	      if (NEXT_INSN (insn))
		PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	    }
	  if (LABEL_P (insn))
	    break;
	  insn = NEXT_INSN (insn);
	}
      if (bb->il.rtl->footer)
	{
	  insn = BB_END (bb);
	  NEXT_INSN (insn) = bb->il.rtl->footer;
	  PREV_INSN (bb->il.rtl->footer) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	  NEXT_INSN (insn) = next;
	  if (next)
	    PREV_INSN (next) = insn;
	  else
	    set_last_insn (insn);
	}
    }
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->il.rtl->header;
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
	PREV_INSN (*to) = insn;
      *to = remaints;
    }
}

/* Return true when blocks A and B can be safely merged.  */
static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (a) != BB_PARTITION (b))
    return false;

  /* There must be exactly one edge in between the blocks.  */
  return (single_succ_p (a)
	  && single_succ (a) == b
	  && single_pred_p (b) == 1
	  && a != b
	  /* Must be simple edge.  */
	  && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
	  && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
	  /* If the jump insn has side effects,
	     we can't kill the edge.  */
	  && (!JUMP_P (BB_END (a))
	      || (reload_completed
		  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Merge block A and B.  The blocks must be mergeable.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      /* This might have been an EH label that no longer has incoming
	 EH edges.  Update data structures to match.  */
      maybe_remove_eh_handler (BB_HEAD (b));

      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* Possible line number notes should appear in between.  */
  if (b->il.rtl->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last);
      b->il.rtl->header = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a));
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
	first = NEXT_INSN (first);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
      BB_HEAD (b) = NULL;
      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      for (insn = BB_HEAD (b);
	   insn != NEXT_INSN (BB_END (b));
	   insn = NEXT_INSN (insn))
	set_block_for_insn (insn, a);
      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  /* Possible tablejumps and barriers should appear after the block.  */
  if (b->il.rtl->footer)
    {
      if (!a->il.rtl->footer)
	a->il.rtl->footer = b->il.rtl->footer;
      else
	{
	  rtx last = a->il.rtl->footer;

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  NEXT_INSN (last) = b->il.rtl->footer;
	  PREV_INSN (b->il.rtl->footer) = last;
	}
      b->il.rtl->footer = NULL;
    }
  a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n",
	     a->index, b->index);
}
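
/* Illustrative sketch (not part of this file's interfaces): callers
   normally go through the cfghooks wrappers rather than these functions
   directly, so the same loop works in plain RTL and in cfglayout mode.
   BB is any block the caller happens to be iterating over.

     basic_block next = bb->next_bb;
     if (next != EXIT_BLOCK_PTR
	 && can_merge_blocks_p (bb, next))
       merge_blocks (bb, next);

   can_merge_blocks_p and merge_blocks dispatch through cfg_hooks; in
   cfglayout mode they resolve to cfg_layout_can_merge_blocks_p and
   cfg_layout_merge_blocks above.  */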

/* Split edge E.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  /* ??? This info is likely going to be out of date very soon, but we must
     create it to avoid getting an ICE later.  */
  if (e->dest->il.rtl->global_live_at_start)
    {
      new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
		    e->dest->il.rtl->global_live_at_start);
      COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
		    e->dest->il.rtl->global_live_at_start);
    }

  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}

/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}

/* Return 1 if BB ends with a call, possibly followed by some
   instructions that must stay with the call, 0 otherwise.  */

static bool
rtl_block_ends_with_call_p (basic_block bb)
{
  rtx insn = BB_END (bb);

  while (!CALL_P (insn)
	 && insn != BB_HEAD (bb)
	 && keep_with_call_p (insn))
    insn = PREV_INSN (insn);
  return (CALL_P (insn));
}

/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}

/* Return true if we need to add fake edge to exit.
   Helper function for rtl_flow_call_edges_add.  */

static bool
need_fake_edge_p (rtx insn)
{
  if (!INSN_P (insn))
    return false;

  if ((CALL_P (insn)
       && !SIBLING_CALL_P (insn)
       && !find_reg_note (insn, REG_NORETURN, NULL)
       && !CONST_OR_PURE_CALL_P (insn)))
    return true;

  return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	   && MEM_VOLATILE_P (PATTERN (insn)))
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && asm_noperands (insn) != -1
	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
}

/* Add fake edges to the function exit for any non-constant and non-noreturn
   calls, and for volatile inline assembly, in the bitmap of blocks specified
   by BLOCKS or over the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
|
2749 |
|
|
rtl_flow_call_edges_add (sbitmap blocks)
|
2750 |
|
|
{
|
2751 |
|
|
int i;
|
2752 |
|
|
int blocks_split = 0;
|
2753 |
|
|
int last_bb = last_basic_block;
|
2754 |
|
|
bool check_last_block = false;
|
2755 |
|
|
|
2756 |
|
|
if (n_basic_blocks == NUM_FIXED_BLOCKS)
|
2757 |
|
|
return 0;
|
2758 |
|
|
|
2759 |
|
|
if (! blocks)
|
2760 |
|
|
check_last_block = true;
|
2761 |
|
|
else
|
2762 |
|
|
check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
|
2763 |
|
|
|
2764 |
|
|
/* In the last basic block, before epilogue generation, there will be
|
2765 |
|
|
a fallthru edge to EXIT. Special care is required if the last insn
|
2766 |
|
|
of the last basic block is a call because make_edge folds duplicate
|
2767 |
|
|
edges, which would result in the fallthru edge also being marked
|
2768 |
|
|
fake, which would result in the fallthru edge being removed by
|
2769 |
|
|
remove_fake_edges, which would result in an invalid CFG.
|
2770 |
|
|
|
2771 |
|
|
Moreover, we can't elide the outgoing fake edge, since the block
|
2772 |
|
|
profiler needs to take this into account in order to solve the minimal
|
2773 |
|
|
spanning tree in the case that the call doesn't return.
|
2774 |
|
|
|
2775 |
|
|
Handle this by adding a dummy instruction in a new last basic block. */
|
2776 |
|
|
if (check_last_block)
|
2777 |
|
|
{
|
2778 |
|
|
basic_block bb = EXIT_BLOCK_PTR->prev_bb;
|
2779 |
|
|
rtx insn = BB_END (bb);
|
2780 |
|
|
|
2781 |
|
|
/* Back up past insns that must be kept in the same block as a call. */
|
2782 |
|
|
while (insn != BB_HEAD (bb)
|
2783 |
|
|
&& keep_with_call_p (insn))
|
2784 |
|
|
insn = PREV_INSN (insn);
|
2785 |
|
|
|
2786 |
|
|
if (need_fake_edge_p (insn))
|
2787 |
|
|
{
|
2788 |
|
|
edge e;
|
2789 |
|
|
|
2790 |
|
|
e = find_edge (bb, EXIT_BLOCK_PTR);
|
2791 |
|
|
if (e)
|
2792 |
|
|
{
|
2793 |
|
|
insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
|
2794 |
|
|
commit_edge_insertions ();
|
2795 |
|
|
}
|
2796 |
|
|
}
|
2797 |
|
|
}
|
2798 |
|
|
|
2799 |
|
|
/* Now add fake edges to the function exit for any non constant
|
2800 |
|
|
calls since there is no way that we can determine if they will
|
2801 |
|
|
return or not... */
|
2802 |
|
|
|
2803 |
|
|
for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
|
2804 |
|
|
{
|
2805 |
|
|
basic_block bb = BASIC_BLOCK (i);
|
2806 |
|
|
rtx insn;
|
2807 |
|
|
rtx prev_insn;
|
2808 |
|
|
|
2809 |
|
|
if (!bb)
|
2810 |
|
|
continue;
|
2811 |
|
|
|
2812 |
|
|
if (blocks && !TEST_BIT (blocks, i))
|
2813 |
|
|
continue;
|
2814 |
|
|
|
2815 |
|
|
for (insn = BB_END (bb); ; insn = prev_insn)
|
2816 |
|
|
{
|
2817 |
|
|
prev_insn = PREV_INSN (insn);
|
2818 |
|
|
if (need_fake_edge_p (insn))
|
2819 |
|
|
{
|
2820 |
|
|
edge e;
|
2821 |
|
|
rtx split_at_insn = insn;
|
2822 |
|
|
|
2823 |
|
|
/* Don't split the block between a call and an insn that should
|
2824 |
|
|
remain in the same block as the call. */
|
2825 |
|
|
if (CALL_P (insn))
|
2826 |
|
|
while (split_at_insn != BB_END (bb)
|
2827 |
|
|
&& keep_with_call_p (NEXT_INSN (split_at_insn)))
|
2828 |
|
|
split_at_insn = NEXT_INSN (split_at_insn);
|
2829 |
|
|
|
2830 |
|
|
/* The handling above of the final block before the epilogue
|
2831 |
|
|
should be enough to verify that there is no edge to the exit
|
2832 |
|
|
block in CFG already. Calling make_edge in such case would
|
2833 |
|
|
cause us to mark that edge as fake and remove it later. */
|
2834 |
|
|
|
2835 |
|
|
#ifdef ENABLE_CHECKING
|
2836 |
|
|
if (split_at_insn == BB_END (bb))
|
2837 |
|
|
{
|
2838 |
|
|
e = find_edge (bb, EXIT_BLOCK_PTR);
|
2839 |
|
|
gcc_assert (e == NULL);
|
2840 |
|
|
}
|
2841 |
|
|
#endif
|
2842 |
|
|
|
2843 |
|
|
/* Note that the following may create a new basic block
|
2844 |
|
|
and renumber the existing basic blocks. */
|
2845 |
|
|
if (split_at_insn != BB_END (bb))
|
2846 |
|
|
{
|
2847 |
|
|
e = split_block (bb, split_at_insn);
|
2848 |
|
|
if (e)
|
2849 |
|
|
blocks_split++;
|
2850 |
|
|
}
|
2851 |
|
|
|
2852 |
|
|
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
|
2853 |
|
|
}
|
2854 |
|
|
|
2855 |
|
|
if (insn == BB_HEAD (bb))
|
2856 |
|
|
break;
|
2857 |
|
|
}
|
2858 |
|
|
}
|
2859 |
|
|
|
2860 |
|
|
if (blocks_split)
|
2861 |
|
|
verify_flow_info ();
|
2862 |
|
|
|
2863 |
|
|
return blocks_split;
|
2864 |
|
|
}
|
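
/* Illustrative sketch (not part of this file's interfaces, and assuming
   the generic cfghooks wrapper flow_call_edges_add is available): a
   profiling or coverage pass that needs every call to terminate its block
   would typically ask for the whole CFG to be processed.

     int n_split = flow_call_edges_add (NULL);

   Passing NULL instead of an sbitmap requests that every block be
   checked; the return value says how many blocks had to be split after a
   mid-block call or volatile asm.  If only a subset of blocks changed, a
   bitmap of those blocks can be passed instead.  */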

/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is the
   conditional branch target; SECOND_HEAD would be the fall-thru block, but
   there is no need to handle it here because the loop versioning code takes
   care of it.  SECOND_HEAD is a parameter only because the tree-level hook
   needs it and this hook must have the same type.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;

  label = block_label (first_head);
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
			   mode, NULL_RTX, NULL_RTX, label);
  jump = get_last_insn ();
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new condition at the end of the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}

/* Given a block B with a conditional branch at its end, store the branch
   edge and the fall-thru edge into *BRANCH_EDGE and *FALLTHRU_EDGE
   respectively.  */
static void
rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
			   edge *fallthru_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_FALLTHRU)
    {
      *fallthru_edge = e;
      *branch_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *branch_edge = e;
      *fallthru_edge = EDGE_SUCC (b, 1);
    }
}

/* Allocate the RTL-specific info structure for basic block BB.  */

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.rtl);
  bb->il.rtl = ggc_alloc_cleared (sizeof (struct rtl_bb_info));
}

/* Add pattern PAT to the end of basic block BB.  */

rtx
insert_insn_end_bb_new (rtx pat, basic_block bb)
{
  rtx insn = BB_END (bb);
  rtx new_insn;
  rtx pat_end = pat;

  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert PAT in front [taking care to
     handle cc0, etc. properly].  Similarly, we need to take care of
     trapping instructions in the presence of non-call exceptions.  */

  if (JUMP_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && (!single_succ_p (bb)
	      || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses value set in new
	 insn?  */
      new_insn = emit_insn_before_noloc (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (CALL_P (insn)
	   && (!single_succ_p (bb)
	       || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.  */

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      insn = find_first_parameter_load (insn, BB_HEAD (bb));

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (LABEL_P (insn)
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_insn_before_noloc (pat, insn);
    }
  else
    new_insn = emit_insn_after_noloc (pat, insn);

  return new_insn;
}
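
/* Illustrative sketch (not part of this file's interfaces): a pass that
   wants to append a computation to a block without worrying about a
   trailing jump, call or jump table calls this wrapper instead of
   emit_insn_after directly.  TARGET_REG and VALUE_RTX are hypothetical
   placeholders for operands the pass already has.

     rtx new_insn
       = insert_insn_end_bb_new (gen_move_insn (target_reg, value_rtx), bb);

   The returned insn is the last one actually emitted, which may sit
   before BB_END (bb) when the block ends in a control flow insn.  */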

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_delete_block,
  rtl_split_block,
  rtl_move_block_after,
  rtl_can_merge_blocks,  /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  NULL, /* can_duplicate_block_p */
  NULL, /* duplicate_block */
  rtl_split_edge,
  rtl_make_forwarder_block,
  rtl_tidy_fallthru_edge,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL  /* flush_pending_stmts */
};

/* Implementation of CFG manipulation for cfg layout RTL, where basic
   blocks connected via fallthru edges do not have to be adjacent.  This
   representation will hopefully become the default one in a future
   version of the compiler.  */

/* We do not want to declare these functions in a header file, since they
   should only be used through the cfghooks interface, and we do not want to
   move them here since it would require also moving quite a lot of related
   code.  */
extern bool cfg_layout_can_duplicate_bb_p (basic_block);
extern basic_block cfg_layout_duplicate_bb (basic_block);

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  rtl_move_block_after,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  rtl_predict_edge,
  rtl_predicted_by_p,
  cfg_layout_can_duplicate_bb_p,
  cfg_layout_duplicate_bb,
  cfg_layout_split_edge,
  rtl_make_forwarder_block,
  NULL,
  rtl_block_ends_with_call_p,
  rtl_block_ends_with_condjump_p,
  rtl_flow_call_edges_add,
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL /* flush_pending_stmts */
};