/* Loop manipulation code for GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "cfghooks.h"
#include "output.h"
#include "tree-flow.h"

static void copy_loops_to (struct loop **, int,
                           struct loop *);
static void loop_redirect_edge (edge, basic_block);
static void remove_bbs (basic_block *, int);
static bool rpe_enum_p (const_basic_block, const void *);
static int find_path (edge, basic_block **);
static void fix_loop_placements (struct loop *, bool *);
static bool fix_bb_placement (basic_block);
static void fix_bb_placements (basic_block, bool *);
static void unloop (struct loop *, bool *);

#define RDIV(X,Y) (((X) + (Y) / 2) / (Y))

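/* A worked example of the rounding RDIV performs: because half of the
   divisor is added before dividing, the result is rounded to the nearest
   integer instead of truncated.  For instance

     RDIV (7, 2)  == (7 + 1) / 2  == 4,   while 7 / 2  == 3,
     RDIV (11, 4) == (11 + 2) / 4 == 3,   while 11 / 4 == 2.

   This matters below, where frequencies and probabilities are rescaled
   repeatedly relative to REG_BR_PROB_BASE.  */
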
/* Checks whether basic block BB is dominated by DATA.  */
static bool
rpe_enum_p (const_basic_block bb, const void *data)
{
  return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
}

/* Remove basic blocks BBS.  NBBS is the number of the basic blocks.  */

static void
remove_bbs (basic_block *bbs, int nbbs)
{
  int i;

  for (i = 0; i < nbbs; i++)
    delete_basic_block (bbs[i]);
}

/* Find path -- i.e. the basic blocks dominated by edge E and put them
   into array BBS, that will be allocated large enough to contain them.
   E->dest must have exactly one predecessor for this to work (it is
   easy to achieve and we do not put it here because we do not want to
   alter anything by this function).  The number of basic blocks in the
   path is returned.  */
static int
find_path (edge e, basic_block **bbs)
{
  gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);

  /* Find bbs in the path.  */
  *bbs = XCNEWVEC (basic_block, n_basic_blocks);
  return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
                             n_basic_blocks, e->dest);
}

/* Fix placement of basic block BB inside loop hierarchy --
   Let L be the loop to which BB belongs.  Then every successor of BB must
   either
     1) belong to some superloop of loop L, or
     2) be a header of loop K such that K->outer is superloop of L
   Returns true if we had to move BB into another loop to enforce this
   condition, false if the placement of BB was already correct (provided that
   placements of its successors are correct).  */
static bool
fix_bb_placement (basic_block bb)
{
  edge e;
  edge_iterator ei;
  struct loop *loop = current_loops->tree_root, *act;

  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->dest == EXIT_BLOCK_PTR)
        continue;

      act = e->dest->loop_father;
      if (act->header == e->dest)
        act = loop_outer (act);

      if (flow_loop_nested_p (loop, act))
        loop = act;
    }

  if (loop == bb->loop_father)
    return false;

  remove_bb_from_loops (bb);
  add_bb_to_loop (bb, loop);

  return true;
}

/* Fix placement of LOOP inside loop tree, i.e. find the innermost superloop
   of LOOP to which at least one exit edge of LOOP leads, and set it
   as the immediate superloop of LOOP.  Return true if the immediate superloop
   of LOOP changed.  */

static bool
fix_loop_placement (struct loop *loop)
{
  unsigned i;
  edge e;
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  struct loop *father = current_loops->tree_root, *act;
  bool ret = false;

  for (i = 0; VEC_iterate (edge, exits, i, e); i++)
    {
      act = find_common_loop (loop, e->dest->loop_father);
      if (flow_loop_nested_p (father, act))
        father = act;
    }

  if (father != loop_outer (loop))
    {
      for (act = loop_outer (loop); act != father; act = loop_outer (act))
        act->num_nodes -= loop->num_nodes;
      flow_loop_tree_node_remove (loop);
      flow_loop_tree_node_add (father, loop);

      /* The exit edges of LOOP no longer exit its original immediate
         superloops; remove them from the appropriate exit lists.  */
      for (i = 0; VEC_iterate (edge, exits, i, e); i++)
        rescan_loop_exit (e, false, false);

      ret = true;
    }

  VEC_free (edge, heap, exits);
  return ret;
}

/* Fix placements of basic blocks inside loop hierarchy stored in loops; i.e.
   enforce the condition stated in the description of fix_bb_placement.  We
   start from basic block FROM that had some of its successors removed, so that
   its placement no longer has to be correct, and iteratively fix placement of
   its predecessors that may change if placement of FROM changed.  Also fix
   placement of subloops of FROM->loop_father, that might also be altered due
   to this change; the condition for them is similar, except that instead of
   successors we consider edges coming out of the loops.

   If the changes may invalidate the information about irreducible regions,
   IRRED_INVALIDATED is set to true.  */

static void
fix_bb_placements (basic_block from,
                   bool *irred_invalidated)
{
  sbitmap in_queue;
  basic_block *queue, *qtop, *qbeg, *qend;
  struct loop *base_loop;
  edge e;

  /* We pass through blocks back-reachable from FROM, testing whether some
     of their successors moved to an outer loop.  It may be necessary to
     iterate several times, but it is finite, as we stop unless we move
     the basic block up the loop structure.  The whole story is a bit
     more complicated due to presence of subloops, those are moved using
     fix_loop_placement.  */

  base_loop = from->loop_father;
  if (base_loop == current_loops->tree_root)
    return;

  in_queue = sbitmap_alloc (last_basic_block);
  sbitmap_zero (in_queue);
  SET_BIT (in_queue, from->index);
  /* Prevent us from going out of the base_loop.  */
  SET_BIT (in_queue, base_loop->header->index);

  queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
  qtop = queue + base_loop->num_nodes + 1;
  qbeg = queue;
  qend = queue + 1;
  *qbeg = from;

  while (qbeg != qend)
    {
      edge_iterator ei;
      from = *qbeg;
      qbeg++;
      if (qbeg == qtop)
        qbeg = queue;
      RESET_BIT (in_queue, from->index);

      if (from->loop_father->header == from)
        {
          /* Subloop header, maybe move the loop upward.  */
          if (!fix_loop_placement (from->loop_father))
            continue;
        }
      else
        {
          /* Ordinary basic block.  */
          if (!fix_bb_placement (from))
            continue;
        }

      FOR_EACH_EDGE (e, ei, from->succs)
        {
          if (e->flags & EDGE_IRREDUCIBLE_LOOP)
            *irred_invalidated = true;
        }

      /* Something has changed, insert predecessors into queue.  */
      FOR_EACH_EDGE (e, ei, from->preds)
        {
          basic_block pred = e->src;
          struct loop *nca;

          if (e->flags & EDGE_IRREDUCIBLE_LOOP)
            *irred_invalidated = true;

          if (TEST_BIT (in_queue, pred->index))
            continue;

          /* If it is a subloop, then it either was not moved, or
             the path up the loop tree from base_loop does not contain
             it.  */
          nca = find_common_loop (pred->loop_father, base_loop);
          if (pred->loop_father != base_loop
              && (nca == base_loop
                  || nca != pred->loop_father))
            pred = pred->loop_father->header;
          else if (!flow_loop_nested_p (from->loop_father, pred->loop_father))
            {
              /* No point in processing it.  */
              continue;
            }

          if (TEST_BIT (in_queue, pred->index))
            continue;

          /* Schedule the basic block.  */
          *qend = pred;
          qend++;
          if (qend == qtop)
            qend = queue;
          SET_BIT (in_queue, pred->index);
        }
    }
  free (in_queue);
  free (queue);
}

/* Removes path beginning at edge E, i.e. remove basic blocks dominated by E
   and update loop structures and dominators.  Return true if we were able
   to remove the path, false otherwise (and nothing is affected then).  */
bool
remove_path (edge e)
{
  edge ae;
  basic_block *rem_bbs, *bord_bbs, from, bb;
  VEC (basic_block, heap) *dom_bbs;
  int i, nrem, n_bord_bbs;
  sbitmap seen;
  bool irred_invalidated = false;

  if (!can_remove_branch_p (e))
    return false;

  /* Keep track of whether we need to update information about irreducible
     regions.  This is the case if the removed area is a part of the
     irreducible region, or if the set of basic blocks that belong to a loop
     that is inside an irreducible region is changed, or if such a loop is
     removed.  */
  if (e->flags & EDGE_IRREDUCIBLE_LOOP)
    irred_invalidated = true;

  /* We need to check whether basic blocks are dominated by the edge
     e, but we only have basic block dominators.  This is easy to
     fix -- when e->dest has exactly one predecessor, this corresponds
     to blocks dominated by e->dest, if not, split the edge.  */
  if (!single_pred_p (e->dest))
    e = single_pred_edge (split_edge (e));

  /* It may happen that by removing path we remove one or more loops
     we belong to.  In this case first unloop the loops, then proceed
     normally.  We may assume that e->dest is not a header of any loop,
     as it now has exactly one predecessor.  */
  while (loop_outer (e->src->loop_father)
         && dominated_by_p (CDI_DOMINATORS,
                            e->src->loop_father->latch, e->dest))
    unloop (e->src->loop_father, &irred_invalidated);

  /* Identify the path.  */
  nrem = find_path (e, &rem_bbs);

  n_bord_bbs = 0;
  bord_bbs = XCNEWVEC (basic_block, n_basic_blocks);
  seen = sbitmap_alloc (last_basic_block);
  sbitmap_zero (seen);

  /* Find "border" basic blocks -- i.e. those with a predecessor in the
     removed path.  */
  for (i = 0; i < nrem; i++)
    SET_BIT (seen, rem_bbs[i]->index);
  for (i = 0; i < nrem; i++)
    {
      edge_iterator ei;
      bb = rem_bbs[i];
      FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
        if (ae->dest != EXIT_BLOCK_PTR && !TEST_BIT (seen, ae->dest->index))
          {
            SET_BIT (seen, ae->dest->index);
            bord_bbs[n_bord_bbs++] = ae->dest;

            if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
              irred_invalidated = true;
          }
    }

  /* Remove the path.  */
  from = e->src;
  remove_branch (e);
  dom_bbs = NULL;

  /* Cancel loops contained in the path.  */
  for (i = 0; i < nrem; i++)
    if (rem_bbs[i]->loop_father->header == rem_bbs[i])
      cancel_loop_tree (rem_bbs[i]->loop_father);

  remove_bbs (rem_bbs, nrem);
  free (rem_bbs);

  /* Find blocks whose dominators may be affected.  */
  sbitmap_zero (seen);
  for (i = 0; i < n_bord_bbs; i++)
    {
      basic_block ldom;

      bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
      if (TEST_BIT (seen, bb->index))
        continue;
      SET_BIT (seen, bb->index);

      for (ldom = first_dom_son (CDI_DOMINATORS, bb);
           ldom;
           ldom = next_dom_son (CDI_DOMINATORS, ldom))
        if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
          VEC_safe_push (basic_block, heap, dom_bbs, ldom);
    }

  free (seen);

  /* Recount dominators.  */
  iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
  VEC_free (basic_block, heap, dom_bbs);
  free (bord_bbs);

  /* Fix placements of basic blocks inside loops and the placement of
     loops in the loop tree.  */
  fix_bb_placements (from, &irred_invalidated);
  fix_loop_placements (from->loop_father, &irred_invalidated);

  if (irred_invalidated
      && loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
    mark_irreducible_loops ();

  return true;
}

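/* Usage sketch (illustrative; the real callers elsewhere in the compiler are
   the authoritative examples).  Given an edge EXIT that a pass has proved is
   never taken, one might write

     if (!remove_path (exit))
       give_up ();

   where give_up is a placeholder: when remove_path returns false (i.e. when
   can_remove_branch_p above rejects the edge), the CFG, dominators and loop
   tree are left untouched; when it returns true, the blocks dominated by
   EXIT have been deleted and all of these structures have been updated.  */
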
/* Creates place for a new LOOP in loops structure.  */

static void
place_new_loop (struct loop *loop)
{
  loop->num = number_of_loops ();
  VEC_safe_push (loop_p, gc, current_loops->larray, loop);
}

/* Given LOOP structure with filled header and latch, find the body of the
   corresponding loop and add it to loops tree.  Insert the LOOP as a son of
   OUTER.  */

void
add_loop (struct loop *loop, struct loop *outer)
{
  basic_block *bbs;
  int i, n;
  struct loop *subloop;
  edge e;
  edge_iterator ei;

  /* Add it to loop structure.  */
  place_new_loop (loop);
  flow_loop_tree_node_add (outer, loop);

  /* Find its nodes.  */
  bbs = XNEWVEC (basic_block, n_basic_blocks);
  n = get_loop_body_with_size (loop, bbs, n_basic_blocks);

  for (i = 0; i < n; i++)
    {
      if (bbs[i]->loop_father == outer)
        {
          remove_bb_from_loops (bbs[i]);
          add_bb_to_loop (bbs[i], loop);
          continue;
        }

      loop->num_nodes++;

      /* If we find a direct subloop of OUTER, move it to LOOP.  */
      subloop = bbs[i]->loop_father;
      if (loop_outer (subloop) == outer
          && subloop->header == bbs[i])
        {
          flow_loop_tree_node_remove (subloop);
          flow_loop_tree_node_add (loop, subloop);
        }
    }

  /* Update the information about loop exit edges.  */
  for (i = 0; i < n; i++)
    {
      FOR_EACH_EDGE (e, ei, bbs[i]->succs)
        {
          rescan_loop_exit (e, false, false);
        }
    }

  free (bbs);
}

/* Multiply all frequencies in LOOP by NUM/DEN.  */
void
scale_loop_frequencies (struct loop *loop, int num, int den)
{
  basic_block *bbs;

  bbs = get_loop_body (loop);
  scale_bbs_frequencies_int (bbs, loop->num_nodes, num, den);
  free (bbs);
}

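/* For example, scale_loop_frequencies (loop, 1, 2) halves the frequency of
   every block in LOOP.  The callers below (create_empty_loop_on_edge and
   loopify) pass REG_BR_PROB_BASE as DEN so that NUM can be read as a
   REG_BR_PROB_BASE-relative probability.  */
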
/* Recompute dominance information for basic blocks outside LOOP.  */

static void
update_dominators_in_loop (struct loop *loop)
{
  VEC (basic_block, heap) *dom_bbs = NULL;
  sbitmap seen;
  basic_block *body;
  unsigned i;

  seen = sbitmap_alloc (last_basic_block);
  sbitmap_zero (seen);
  body = get_loop_body (loop);

  for (i = 0; i < loop->num_nodes; i++)
    SET_BIT (seen, body[i]->index);

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block ldom;

      for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
           ldom;
           ldom = next_dom_son (CDI_DOMINATORS, ldom))
        if (!TEST_BIT (seen, ldom->index))
          {
            SET_BIT (seen, ldom->index);
            VEC_safe_push (basic_block, heap, dom_bbs, ldom);
          }
    }

  iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
  free (body);
  free (seen);
  VEC_free (basic_block, heap, dom_bbs);
}

/* Creates an if region as shown below.  CONDITION is used to create
   the test for the if.

   |
   |     -------------                 -------------
   |     |  pred_bb  |                 |  pred_bb  |
   |     -------------                 -------------
   |           |                             |
   |           |                             | ENTRY_EDGE
   |           | ENTRY_EDGE                  V
   |           |             ====>     -------------
   |           |                       |  cond_bb  |
   |           |                       | CONDITION |
   |           |                       -------------
   |           V                        /         \
   |     -------------         e_false /           \ e_true
   |     |  succ_bb  |                V             V
   |     -------------         -----------       -----------
   |                           | false_bb |      | true_bb |
   |                           -----------       -----------
   |                                   \           /
   |                                    \         /
   |                                     V       V
   |                                   -------------
   |                                   |  join_bb  |
   |                                   -------------
   |                                         | exit_edge (result)
   |                                         V
   |                                    -----------
   |                                    | succ_bb |
   |                                    -----------
   |
 */

edge
create_empty_if_region_on_edge (edge entry_edge, tree condition)
{

  basic_block cond_bb, true_bb, false_bb, join_bb;
  edge e_true, e_false, exit_edge;
  gimple cond_stmt;
  tree simple_cond;
  gimple_stmt_iterator gsi;

  cond_bb = split_edge (entry_edge);

  /* Insert condition in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  simple_cond =
    force_gimple_operand_gsi (&gsi, condition, true, NULL,
                              false, GSI_NEW_STMT);
  cond_stmt = gimple_build_cond_from_tree (simple_cond, NULL_TREE, NULL_TREE);
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  join_bb = split_edge (single_succ_edge (cond_bb));

  e_true = single_succ_edge (cond_bb);
  true_bb = split_edge (e_true);

  e_false = make_edge (cond_bb, join_bb, 0);
  false_bb = split_edge (e_false);

  e_true->flags &= ~EDGE_FALLTHRU;
  e_true->flags |= EDGE_TRUE_VALUE;
  e_false->flags &= ~EDGE_FALLTHRU;
  e_false->flags |= EDGE_FALSE_VALUE;

  set_immediate_dominator (CDI_DOMINATORS, cond_bb, entry_edge->src);
  set_immediate_dominator (CDI_DOMINATORS, true_bb, cond_bb);
  set_immediate_dominator (CDI_DOMINATORS, false_bb, cond_bb);
  set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);

  exit_edge = single_succ_edge (join_bb);

  if (single_pred_p (exit_edge->dest))
    set_immediate_dominator (CDI_DOMINATORS, exit_edge->dest, join_bb);

  return exit_edge;
}

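/* A hypothetical caller (not from this file): to guard newly emitted GIMPLE
   code with a runtime test A < B, one could build the condition as a tree
   and let this function create the diamond:

     tree cond = build2 (LT_EXPR, boolean_type_node, a, b);
     edge exit = create_empty_if_region_on_edge (entry_edge, cond);

   Statements placed into true_bb and false_bb afterwards run only on the
   corresponding outcome of COND; the returned edge leads to the original
   successor of ENTRY_EDGE.  A, B and ENTRY_EDGE are placeholders here.  */
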
/* create_empty_loop_on_edge
   |
   |     - pred_bb -                   ------ pred_bb ------
   |     |         |                   | iv0 = initial_value |
   |     -----|-----                   ---------|-----------
   |          |                           ______ | entry_edge
   |          | entry_edge               /      | |
   |          |             ====>       |  -V---V- loop_header -------------
   |          V                         |  | iv_before = phi (iv0, iv_after) |
   |     - succ_bb -                    |  ---|-----------------------------
   |     |         |                    |     |
   |     -----------                    |  ---V--- loop_body ---------------
   |                                    |  | iv_after = iv_before + stride   |
   |                                    |  | if (iv_before < upper_bound)    |
   |                                    |  ---|--------------\--------------
   |                                    |     |               \ exit_e
   |                                    |     V                \
   |                                    |  - loop_latch -       V- succ_bb -
   |                                    |  |            |       |           |
   |                                    |  /-------------        -----------
   |                                     \ ___ /

   Creates an empty loop as shown above, the IV_BEFORE is the SSA_NAME
   that is used before the increment of IV.  IV_BEFORE should be used for
   adding code to the body that uses the IV.  OUTER is the outer loop in
   which the new loop should be inserted.

   Both INITIAL_VALUE and UPPER_BOUND expressions are gimplified and
   inserted on the loop entry edge.  This implies that this function
   should be used only when the UPPER_BOUND expression is a loop
   invariant.  */

struct loop *
create_empty_loop_on_edge (edge entry_edge,
                           tree initial_value,
                           tree stride, tree upper_bound,
                           tree iv,
                           tree *iv_before,
                           tree *iv_after,
                           struct loop *outer)
{
  basic_block loop_header, loop_latch, succ_bb, pred_bb;
  struct loop *loop;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple cond_expr;
  tree exit_test;
  edge exit_e;
  int prob;

  gcc_assert (entry_edge && initial_value && stride && upper_bound && iv);

  /* Create header, latch and wire up the loop.  */
  pred_bb = entry_edge->src;
  loop_header = split_edge (entry_edge);
  loop_latch = split_edge (single_succ_edge (loop_header));
  succ_bb = single_succ (loop_latch);
  make_edge (loop_header, succ_bb, 0);
  redirect_edge_succ_nodup (single_succ_edge (loop_latch), loop_header);

  /* Set immediate dominator information.  */
  set_immediate_dominator (CDI_DOMINATORS, loop_header, pred_bb);
  set_immediate_dominator (CDI_DOMINATORS, loop_latch, loop_header);
  set_immediate_dominator (CDI_DOMINATORS, succ_bb, loop_header);

  /* Initialize a loop structure and put it in a loop hierarchy.  */
  loop = alloc_loop ();
  loop->header = loop_header;
  loop->latch = loop_latch;
  add_loop (loop, outer);

  /* TODO: Fix frequencies and counts.  */
  prob = REG_BR_PROB_BASE / 2;

  scale_loop_frequencies (loop, REG_BR_PROB_BASE - prob, REG_BR_PROB_BASE);

  /* Update dominators.  */
  update_dominators_in_loop (loop);

  /* Modify edge flags.  */
  exit_e = single_exit (loop);
  exit_e->flags = EDGE_LOOP_EXIT | EDGE_FALSE_VALUE;
  single_pred_edge (loop_latch)->flags = EDGE_TRUE_VALUE;

  /* Construct IV code in loop.  */
  initial_value = force_gimple_operand (initial_value, &stmts, true, iv);
  if (stmts)
    {
      gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
      gsi_commit_edge_inserts ();
    }

  upper_bound = force_gimple_operand (upper_bound, &stmts, true, NULL);
  if (stmts)
    {
      gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
      gsi_commit_edge_inserts ();
    }

  gsi = gsi_last_bb (loop_header);
  create_iv (initial_value, stride, iv, loop, &gsi, false,
             iv_before, iv_after);

  /* Insert loop exit condition.  */
  cond_expr = gimple_build_cond
    (LT_EXPR, *iv_before, upper_bound, NULL_TREE, NULL_TREE);

  exit_test = gimple_cond_lhs (cond_expr);
  exit_test = force_gimple_operand_gsi (&gsi, exit_test, true, NULL,
                                        false, GSI_NEW_STMT);
  gimple_cond_set_lhs (cond_expr, exit_test);
  gsi = gsi_last_bb (exit_e->src);
  gsi_insert_after (&gsi, cond_expr, GSI_NEW_STMT);

  split_block_after_labels (loop_header);

  return loop;
}

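/* A hypothetical caller (not from this file): to build an empty counted loop
   equivalent to "for (i = 0; i < n; i += 1)" around code that will be added
   to its body later, assuming TYPE is the (unsigned) type of the bound N and
   IV is a variable of that type:

     tree iv_before, iv_after;
     struct loop *l
       = create_empty_loop_on_edge (entry_edge,
                                    build_int_cst (type, 0),
                                    build_int_cst (type, 1),
                                    n, iv, &iv_before, &iv_after, outer);

   Code inserted into the body should use IV_BEFORE for the current
   iteration, as described above.  ENTRY_EDGE, N, IV and OUTER are
   placeholders here.  */
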
/* Make area between HEADER_EDGE and LATCH_EDGE a loop by connecting
   latch to header and update loop tree and dominators
   accordingly.  Everything between them plus LATCH_EDGE destination must
   be dominated by HEADER_EDGE destination, and back-reachable from
   LATCH_EDGE source.  HEADER_EDGE is redirected to basic block SWITCH_BB,
   FALSE_EDGE of SWITCH_BB to original destination of HEADER_EDGE and
   TRUE_EDGE of SWITCH_BB to original destination of LATCH_EDGE.
   Returns the newly created loop.  Frequencies and counts in the new loop
   are scaled by FALSE_SCALE and in the old one by TRUE_SCALE.  */

struct loop *
loopify (edge latch_edge, edge header_edge,
         basic_block switch_bb, edge true_edge, edge false_edge,
         bool redirect_all_edges, unsigned true_scale, unsigned false_scale)
{
  basic_block succ_bb = latch_edge->dest;
  basic_block pred_bb = header_edge->src;
  struct loop *loop = alloc_loop ();
  struct loop *outer = loop_outer (succ_bb->loop_father);
  int freq;
  gcov_type cnt;
  edge e;
  edge_iterator ei;

  loop->header = header_edge->dest;
  loop->latch = latch_edge->src;

  freq = EDGE_FREQUENCY (header_edge);
  cnt = header_edge->count;

  /* Redirect edges.  */
  loop_redirect_edge (latch_edge, loop->header);
  loop_redirect_edge (true_edge, succ_bb);

  /* During loop versioning, one of the switch_bb edges is already properly
     set.  Do not redirect it again unless redirect_all_edges is true.  */
  if (redirect_all_edges)
    {
      loop_redirect_edge (header_edge, switch_bb);
      loop_redirect_edge (false_edge, loop->header);

      /* Update dominators.  */
      set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
      set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
    }

  set_immediate_dominator (CDI_DOMINATORS, succ_bb, switch_bb);

  /* Compute new loop.  */
  add_loop (loop, outer);

  /* Add switch_bb to appropriate loop.  */
  if (switch_bb->loop_father)
    remove_bb_from_loops (switch_bb);
  add_bb_to_loop (switch_bb, outer);

  /* Fix frequencies.  */
  if (redirect_all_edges)
    {
      switch_bb->frequency = freq;
      switch_bb->count = cnt;
      FOR_EACH_EDGE (e, ei, switch_bb->succs)
        {
          e->count = (switch_bb->count * e->probability) / REG_BR_PROB_BASE;
        }
    }
  scale_loop_frequencies (loop, false_scale, REG_BR_PROB_BASE);
  scale_loop_frequencies (succ_bb->loop_father, true_scale, REG_BR_PROB_BASE);
  update_dominators_in_loop (loop);

  return loop;
}

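/* Both scale arguments are REG_BR_PROB_BASE-relative, matching the
   scale_loop_frequencies calls above: passing REG_BR_PROB_BASE for a scale
   leaves the corresponding loop's frequencies unchanged, while e.g.
   REG_BR_PROB_BASE / 2 halves them.  */
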
/* Remove the latch edge of a LOOP and update loops to indicate that
   the LOOP was removed.  After this function, original loop latch will
   have no successor, which caller is expected to fix somehow.

   If this may cause the information about irreducible regions to become
   invalid, IRRED_INVALIDATED is set to true.  */

static void
unloop (struct loop *loop, bool *irred_invalidated)
{
  basic_block *body;
  struct loop *ploop;
  unsigned i, n;
  basic_block latch = loop->latch;
  bool dummy = false;

  if (loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP)
    *irred_invalidated = true;

  /* This is relatively straightforward.  The dominators are unchanged, as
     loop header dominates loop latch, so the only thing we have to care of
     is the placement of loops and basic blocks inside the loop tree.  We
     move them all to the loop->outer, and then let fix_bb_placements do
     its work.  */

  body = get_loop_body (loop);
  n = loop->num_nodes;
  for (i = 0; i < n; i++)
    if (body[i]->loop_father == loop)
      {
        remove_bb_from_loops (body[i]);
        add_bb_to_loop (body[i], loop_outer (loop));
      }
  free (body);

  while (loop->inner)
    {
      ploop = loop->inner;
      flow_loop_tree_node_remove (ploop);
      flow_loop_tree_node_add (loop_outer (loop), ploop);
    }

  /* Remove the loop and free its data.  */
  delete_loop (loop);

  remove_edge (single_succ_edge (latch));

  /* We do not pass IRRED_INVALIDATED to fix_bb_placements here, as even if
     there is an irreducible region inside the cancelled loop, the flags will
     be still correct.  */
  fix_bb_placements (latch, &dummy);
}

/* Fix placement of superloops of LOOP inside loop tree, i.e. ensure that
   condition stated in description of fix_loop_placement holds for them.
   It is used in case when we removed some edges coming out of LOOP, which
   may cause the right placement of LOOP inside loop tree to change.

   IRRED_INVALIDATED is set to true if a change in the loop structures might
   invalidate the information about irreducible regions.  */

static void
fix_loop_placements (struct loop *loop, bool *irred_invalidated)
{
  struct loop *outer;

  while (loop_outer (loop))
    {
      outer = loop_outer (loop);
      if (!fix_loop_placement (loop))
        break;

      /* Changing the placement of a loop in the loop tree may alter the
         validity of condition 2) of the description of fix_bb_placement
         for its preheader, because the successor is the header and belongs
         to the loop.  So call fix_bb_placements to fix up the placement
         of the preheader and (possibly) of its predecessors.  */
      fix_bb_placements (loop_preheader_edge (loop)->src,
                         irred_invalidated);
      loop = outer;
    }
}

/* Creates a copy of LOOP as a subloop of TARGET loop, placing the newly
   created loop into the loops structure.  */
struct loop *
duplicate_loop (struct loop *loop, struct loop *target)
{
  struct loop *cloop;
  cloop = alloc_loop ();
  place_new_loop (cloop);

  /* Mark the new loop as copy of LOOP.  */
  set_loop_copy (loop, cloop);

  /* Add it to target.  */
  flow_loop_tree_node_add (target, cloop);

  return cloop;
}

/* Copies structure of subloops of LOOP into TARGET loop, placing
   newly created loops into loop tree.  */
void
duplicate_subloops (struct loop *loop, struct loop *target)
{
  struct loop *aloop, *cloop;

  for (aloop = loop->inner; aloop; aloop = aloop->next)
    {
      cloop = duplicate_loop (aloop, target);
      duplicate_subloops (aloop, cloop);
    }
}

/* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
   into TARGET loop, placing newly created loops into loop tree.  */
static void
copy_loops_to (struct loop **copied_loops, int n, struct loop *target)
{
  struct loop *aloop;
  int i;

  for (i = 0; i < n; i++)
    {
      aloop = duplicate_loop (copied_loops[i], target);
      duplicate_subloops (copied_loops[i], aloop);
    }
}

/* Redirects edge E to basic block DEST.  */
static void
loop_redirect_edge (edge e, basic_block dest)
{
  if (e->dest == dest)
    return;

  redirect_edge_and_branch_force (e, dest);
}

/* Check whether LOOP's body can be duplicated.  */
bool
can_duplicate_loop_p (const struct loop *loop)
{
  int ret;
  basic_block *bbs = get_loop_body (loop);

  ret = can_copy_bbs_p (bbs, loop->num_nodes);
  free (bbs);

  return ret;
}

/* Sets probability and count of edge E to zero.  The probability and count
   are redistributed evenly to the remaining edges coming from E->src.  */

static void
set_zero_probability (edge e)
{
  basic_block bb = e->src;
  edge_iterator ei;
  edge ae, last = NULL;
  unsigned n = EDGE_COUNT (bb->succs);
  gcov_type cnt = e->count, cnt1;
  unsigned prob = e->probability, prob1;

  gcc_assert (n > 1);
  cnt1 = cnt / (n - 1);
  prob1 = prob / (n - 1);

  FOR_EACH_EDGE (ae, ei, bb->succs)
    {
      if (ae == e)
        continue;

      ae->probability += prob1;
      ae->count += cnt1;
      last = ae;
    }

  /* Move the rest to one of the edges.  */
  last->probability += prob % (n - 1);
  last->count += cnt % (n - 1);

  e->probability = 0;
  e->count = 0;
}

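/* A worked example: suppose E->src has three successors and E carries
   probability 7001 and count 91.  Then n == 3, so each of the two remaining
   edges gains 7001 / 2 == 3500 probability and 91 / 2 == 45 count; the
   remainders 7001 % 2 == 1 and 91 % 2 == 1 go to the last of them, and E
   itself ends with probability and count 0.  The totals over E->src's
   outgoing edges are thus preserved exactly.  */
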
/* Duplicates body of LOOP to given edge E NDUPL times.  Takes care of updating
   loop structure and dominators.  E's destination must be LOOP header for
   this to work, i.e. it must be entry or latch edge of this loop; these are
   unique, as the loops must have preheaders for this function to work
   correctly (in case E is latch, the function unrolls the loop, if E is entry
   edge, it peels the loop).  Store edges created by copying ORIG edge from
   copies corresponding to set bits in WONT_EXIT bitmap (bit 0 corresponds to
   original LOOP body, the other copies are numbered in order given by control
   flow through them) into TO_REMOVE array.  Returns false if duplication is
   impossible.  */

bool
duplicate_loop_to_header_edge (struct loop *loop, edge e,
                               unsigned int ndupl, sbitmap wont_exit,
                               edge orig, VEC (edge, heap) **to_remove,
                               int flags)
{
  struct loop *target, *aloop;
  struct loop **orig_loops;
  unsigned n_orig_loops;
  basic_block header = loop->header, latch = loop->latch;
  basic_block *new_bbs, *bbs, *first_active;
  basic_block new_bb, bb, first_active_latch = NULL;
  edge ae, latch_edge;
  edge spec_edges[2], new_spec_edges[2];
#define SE_LATCH 0
#define SE_ORIG 1
  unsigned i, j, n;
  int is_latch = (latch == e->src);
  int scale_act = 0, *scale_step = NULL, scale_main = 0;
  int scale_after_exit = 0;
  int p, freq_in, freq_le, freq_out_orig;
  int prob_pass_thru, prob_pass_wont_exit, prob_pass_main;
  int add_irreducible_flag;
  basic_block place_after;
  bitmap bbs_to_scale = NULL;
  bitmap_iterator bi;

  gcc_assert (e->dest == loop->header);
  gcc_assert (ndupl > 0);

  if (orig)
    {
      /* Orig must be edge out of the loop.  */
      gcc_assert (flow_bb_inside_loop_p (loop, orig->src));
      gcc_assert (!flow_bb_inside_loop_p (loop, orig->dest));
    }

  n = loop->num_nodes;
  bbs = get_loop_body_in_dom_order (loop);
  gcc_assert (bbs[0] == loop->header);
  gcc_assert (bbs[n - 1] == loop->latch);

  /* Check whether duplication is possible.  */
  if (!can_copy_bbs_p (bbs, loop->num_nodes))
    {
      free (bbs);
      return false;
    }
  new_bbs = XNEWVEC (basic_block, loop->num_nodes);

  /* In case we are doing loop peeling and the loop is in the middle of
     irreducible region, the peeled copies will be inside it too.  */
  add_irreducible_flag = e->flags & EDGE_IRREDUCIBLE_LOOP;
  gcc_assert (!is_latch || !add_irreducible_flag);

  /* Find edge from latch.  */
  latch_edge = loop_latch_edge (loop);

  if (flags & DLTHE_FLAG_UPDATE_FREQ)
    {
      /* Calculate coefficients by which we have to scale frequencies
         of duplicated loop bodies.  */
      freq_in = header->frequency;
      freq_le = EDGE_FREQUENCY (latch_edge);
      if (freq_in == 0)
        freq_in = 1;
      if (freq_in < freq_le)
        freq_in = freq_le;
      freq_out_orig = orig ? EDGE_FREQUENCY (orig) : freq_in - freq_le;
      if (freq_out_orig > freq_in - freq_le)
        freq_out_orig = freq_in - freq_le;
      prob_pass_thru = RDIV (REG_BR_PROB_BASE * freq_le, freq_in);
      prob_pass_wont_exit =
              RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);

      if (orig
          && REG_BR_PROB_BASE - orig->probability != 0)
        {
          /* The blocks that are dominated by a removed exit edge ORIG have
             frequencies scaled by this.  */
          scale_after_exit = RDIV (REG_BR_PROB_BASE * REG_BR_PROB_BASE,
                                   REG_BR_PROB_BASE - orig->probability);
          bbs_to_scale = BITMAP_ALLOC (NULL);
          for (i = 0; i < n; i++)
            {
              if (bbs[i] != orig->src
                  && dominated_by_p (CDI_DOMINATORS, bbs[i], orig->src))
                bitmap_set_bit (bbs_to_scale, i);
            }
        }

      scale_step = XNEWVEC (int, ndupl);

      for (i = 1; i <= ndupl; i++)
        scale_step[i - 1] = TEST_BIT (wont_exit, i)
                                ? prob_pass_wont_exit
                                : prob_pass_thru;

      /* Complete peeling is special as the probability of exit in last
         copy becomes 1.  */
      if (flags & DLTHE_FLAG_COMPLETTE_PEEL)
        {
          int wanted_freq = EDGE_FREQUENCY (e);

          if (wanted_freq > freq_in)
            wanted_freq = freq_in;

          gcc_assert (!is_latch);
          /* First copy has frequency of incoming edge.  Each subsequent
             frequency should be reduced by prob_pass_wont_exit.  Caller
             should've managed the flags so that all except the original
             loop have wont_exit set.  */
          scale_act = RDIV (wanted_freq * REG_BR_PROB_BASE, freq_in);
          /* Now simulate the duplication adjustments and compute header
             frequency of the last copy.  */
          for (i = 0; i < ndupl; i++)
            wanted_freq = RDIV (wanted_freq * scale_step[i], REG_BR_PROB_BASE);
          scale_main = RDIV (wanted_freq * REG_BR_PROB_BASE, freq_in);
        }
      else if (is_latch)
        {
          prob_pass_main = TEST_BIT (wont_exit, 0)
                                ? prob_pass_wont_exit
                                : prob_pass_thru;
          p = prob_pass_main;
          scale_main = REG_BR_PROB_BASE;
          for (i = 0; i < ndupl; i++)
            {
              scale_main += p;
              p = RDIV (p * scale_step[i], REG_BR_PROB_BASE);
            }
          scale_main = RDIV (REG_BR_PROB_BASE * REG_BR_PROB_BASE, scale_main);
          scale_act = RDIV (scale_main * prob_pass_main, REG_BR_PROB_BASE);
        }
      else
        {
          scale_main = REG_BR_PROB_BASE;
          for (i = 0; i < ndupl; i++)
            scale_main = RDIV (scale_main * scale_step[i], REG_BR_PROB_BASE);
          scale_act = REG_BR_PROB_BASE - prob_pass_thru;
        }
      for (i = 0; i < ndupl; i++)
        gcc_assert (scale_step[i] >= 0 && scale_step[i] <= REG_BR_PROB_BASE);
      gcc_assert (scale_main >= 0 && scale_main <= REG_BR_PROB_BASE
                  && scale_act >= 0 && scale_act <= REG_BR_PROB_BASE);
    }

  /* Loop the new bbs will belong to.  */
  target = e->src->loop_father;

  /* Original loops.  */
  n_orig_loops = 0;
  for (aloop = loop->inner; aloop; aloop = aloop->next)
    n_orig_loops++;
  orig_loops = XCNEWVEC (struct loop *, n_orig_loops);
  for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
    orig_loops[i] = aloop;

  set_loop_copy (loop, target);

  first_active = XNEWVEC (basic_block, n);
  if (is_latch)
    {
      memcpy (first_active, bbs, n * sizeof (basic_block));
      first_active_latch = latch;
    }

  spec_edges[SE_ORIG] = orig;
  spec_edges[SE_LATCH] = latch_edge;

  place_after = e->src;
  for (j = 0; j < ndupl; j++)
    {
      /* Copy loops.  */
      copy_loops_to (orig_loops, n_orig_loops, target);

      /* Copy bbs.  */
      copy_bbs (bbs, n, new_bbs, spec_edges, 2, new_spec_edges, loop,
                place_after);
      place_after = new_spec_edges[SE_LATCH]->src;

      if (flags & DLTHE_RECORD_COPY_NUMBER)
        for (i = 0; i < n; i++)
          {
            gcc_assert (!new_bbs[i]->aux);
            new_bbs[i]->aux = (void *)(size_t)(j + 1);
          }

      /* Note whether the blocks and edges belong to an irreducible loop.  */
      if (add_irreducible_flag)
        {
          for (i = 0; i < n; i++)
            new_bbs[i]->flags |= BB_DUPLICATED;
          for (i = 0; i < n; i++)
            {
              edge_iterator ei;
              new_bb = new_bbs[i];
              if (new_bb->loop_father == target)
                new_bb->flags |= BB_IRREDUCIBLE_LOOP;

              FOR_EACH_EDGE (ae, ei, new_bb->succs)
                if ((ae->dest->flags & BB_DUPLICATED)
                    && (ae->src->loop_father == target
                        || ae->dest->loop_father == target))
                  ae->flags |= EDGE_IRREDUCIBLE_LOOP;
            }
          for (i = 0; i < n; i++)
            new_bbs[i]->flags &= ~BB_DUPLICATED;
        }

      /* Redirect the special edges.  */
      if (is_latch)
        {
          redirect_edge_and_branch_force (latch_edge, new_bbs[0]);
          redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
                                          loop->header);
          set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], latch);
          latch = loop->latch = new_bbs[n - 1];
          e = latch_edge = new_spec_edges[SE_LATCH];
        }
      else
        {
          redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
                                          loop->header);
          redirect_edge_and_branch_force (e, new_bbs[0]);
          set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], e->src);
          e = new_spec_edges[SE_LATCH];
        }

      /* Record exit edge in this copy.  */
      if (orig && TEST_BIT (wont_exit, j + 1))
        {
          if (to_remove)
            VEC_safe_push (edge, heap, *to_remove, new_spec_edges[SE_ORIG]);
          set_zero_probability (new_spec_edges[SE_ORIG]);

          /* Scale the frequencies of the blocks dominated by the exit.  */
          if (bbs_to_scale)
            {
              EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
                {
                  scale_bbs_frequencies_int (new_bbs + i, 1, scale_after_exit,
                                             REG_BR_PROB_BASE);
                }
            }
        }

      /* Record the first copy in the control flow order if it is not
         the original loop (i.e. in case of peeling).  */
      if (!first_active_latch)
        {
          memcpy (first_active, new_bbs, n * sizeof (basic_block));
          first_active_latch = new_bbs[n - 1];
        }

      /* Set counts and frequencies.  */
      if (flags & DLTHE_FLAG_UPDATE_FREQ)
        {
          scale_bbs_frequencies_int (new_bbs, n, scale_act, REG_BR_PROB_BASE);
          scale_act = RDIV (scale_act * scale_step[j], REG_BR_PROB_BASE);
        }
    }
  free (new_bbs);
  free (orig_loops);

  /* Record the exit edge in the original loop body, and update the
     frequencies.  */
  if (orig && TEST_BIT (wont_exit, 0))
    {
      if (to_remove)
        VEC_safe_push (edge, heap, *to_remove, orig);
      set_zero_probability (orig);

      /* Scale the frequencies of the blocks dominated by the exit.  */
      if (bbs_to_scale)
        {
          EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
            {
              scale_bbs_frequencies_int (bbs + i, 1, scale_after_exit,
                                         REG_BR_PROB_BASE);
            }
        }
    }

  /* Update the original loop.  */
  if (!is_latch)
    set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
  if (flags & DLTHE_FLAG_UPDATE_FREQ)
    {
      scale_bbs_frequencies_int (bbs, n, scale_main, REG_BR_PROB_BASE);
      free (scale_step);
    }

  /* Update dominators of outer blocks if affected.  */
  for (i = 0; i < n; i++)
    {
      basic_block dominated, dom_bb;
      VEC (basic_block, heap) *dom_bbs;
      unsigned j;

      bb = bbs[i];
      bb->aux = 0;

      dom_bbs = get_dominated_by (CDI_DOMINATORS, bb);
      for (j = 0; VEC_iterate (basic_block, dom_bbs, j, dominated); j++)
        {
          if (flow_bb_inside_loop_p (loop, dominated))
            continue;
          dom_bb = nearest_common_dominator (
                        CDI_DOMINATORS, first_active[i], first_active_latch);
          set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
        }
      VEC_free (basic_block, heap, dom_bbs);
    }
  free (first_active);

  free (bbs);
  BITMAP_FREE (bbs_to_scale);

  return true;
}

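/* Sketch of how the WONT_EXIT bitmap is typically set up (illustrative; the
   unrolling and peeling passes are the real callers).  To unroll LOOP
   NUNROLL times through its latch edge, a caller might do roughly

     wont_exit = sbitmap_alloc (nunroll + 1);
     sbitmap_ones (wont_exit);
     RESET_BIT (wont_exit, 0);
     ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
                                         nunroll, wont_exit, exit_edge,
                                         &to_remove, DLTHE_FLAG_UPDATE_FREQ);

   Bit 0 stands for the original body, bits 1..NUNROLL for the copies; a set
   bit means the corresponding copy of EXIT_EDGE is known not to be taken, so
   it gets zero probability and is pushed onto TO_REMOVE.  NUNROLL, EXIT_EDGE,
   TO_REMOVE and OK are placeholders here, not names from this file.  */
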
1292 |
|
|
/* A callback for make_forwarder block, to redirect all edges except for
|
1293 |
|
|
MFB_KJ_EDGE to the entry part. E is the edge for that we should decide
|
1294 |
|
|
whether to redirect it. */
|
1295 |
|
|
|
1296 |
|
|
edge mfb_kj_edge;
|
1297 |
|
|
bool
|
1298 |
|
|
mfb_keep_just (edge e)
|
1299 |
|
|
{
|
1300 |
|
|
return e != mfb_kj_edge;
|
1301 |
|
|
}
|
1302 |
|
|
|
1303 |
|
|
/* True when a candidate preheader BLOCK has predecessors from LOOP. */
|
1304 |
|
|
|
1305 |
|
|
static bool
|
1306 |
|
|
has_preds_from_loop (basic_block block, struct loop *loop)
|
1307 |
|
|
{
|
1308 |
|
|
edge e;
|
1309 |
|
|
edge_iterator ei;
|
1310 |
|
|
|
1311 |
|
|
FOR_EACH_EDGE (e, ei, block->preds)
|
1312 |
|
|
if (e->src->loop_father == loop)
|
1313 |
|
|
return true;
|
1314 |
|
|
return false;
|
1315 |
|
|
}
|
1316 |
|
|
|
1317 |
|
|
/* Creates a pre-header for a LOOP. Returns newly created block. Unless
|
1318 |
|
|
CP_SIMPLE_PREHEADERS is set in FLAGS, we only force LOOP to have single
|
1319 |
|
|
entry; otherwise we also force preheader block to have only one successor.
|
1320 |
|
|
When CP_FALLTHRU_PREHEADERS is set in FLAGS, we force the preheader block
|
1321 |
|
|
to be a fallthru predecessor to the loop header and to have only
|
1322 |
|
|
predecessors from outside of the loop.
|
1323 |
|
|
The function also updates dominators. */
|
1324 |
|
|
|
1325 |
|
|
basic_block
|
1326 |
|
|
create_preheader (struct loop *loop, int flags)
|
1327 |
|
|
{
|
1328 |
|
|
edge e, fallthru;
|
1329 |
|
|
basic_block dummy;
|
1330 |
|
|
int nentry = 0;
|
1331 |
|
|
bool irred = false;
|
1332 |
|
|
bool latch_edge_was_fallthru;
|
1333 |
|
|
edge one_succ_pred = NULL, single_entry = NULL;
|
1334 |
|
|
edge_iterator ei;
|
1335 |
|
|
|
1336 |
|
|
FOR_EACH_EDGE (e, ei, loop->header->preds)
|
1337 |
|
|
{
|
1338 |
|
|
if (e->src == loop->latch)
|
1339 |
|
|
continue;
|
1340 |
|
|
irred |= (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
|
1341 |
|
|
nentry++;
|
1342 |
|
|
single_entry = e;
|
1343 |
|
|
if (single_succ_p (e->src))
|
1344 |
|
|
one_succ_pred = e;
|
1345 |
|
|
}
|
1346 |
|
|
gcc_assert (nentry);
|
1347 |
|
|
if (nentry == 1)
|
1348 |
|
|
{
|
1349 |
|
|
bool need_forwarder_block = false;
|
1350 |
|
|
|
1351 |
|
|
/* We do not allow entry block to be the loop preheader, since we
|
1352 |
|
|
cannot emit code there. */
|
1353 |
|
|
if (single_entry->src == ENTRY_BLOCK_PTR)
|
1354 |
|
|
need_forwarder_block = true;
|
1355 |
|
|
else
|
1356 |
|
|
{
|
1357 |
|
|
/* If we want simple preheaders, also force the preheader to have
|
1358 |
|
|
just a single successor. */
|
1359 |
|
|
if ((flags & CP_SIMPLE_PREHEADERS)
|
1360 |
|
|
&& !single_succ_p (single_entry->src))
|
1361 |
|
|
need_forwarder_block = true;
|
1362 |
|
|
/* If we want fallthru preheaders, also create forwarder block when
|
1363 |
|
|
preheader ends with a jump or has predecessors from loop. */
|
1364 |
|
|
else if ((flags & CP_FALLTHRU_PREHEADERS)
|
1365 |
|
|
&& (JUMP_P (BB_END (single_entry->src))
|
1366 |
|
|
|| has_preds_from_loop (single_entry->src, loop)))
|
1367 |
|
|
need_forwarder_block = true;
|
1368 |
|
|
}
|
1369 |
|
|
if (! need_forwarder_block)
|
1370 |
|
|
return NULL;
|
1371 |
|
|
}
|
1372 |
|
|
|
1373 |
|
|
mfb_kj_edge = loop_latch_edge (loop);
|
1374 |
|
|
latch_edge_was_fallthru = (mfb_kj_edge->flags & EDGE_FALLTHRU) != 0;
|
1375 |
|
|
fallthru = make_forwarder_block (loop->header, mfb_keep_just, NULL);
|
1376 |
|
|
dummy = fallthru->src;
|
1377 |
|
|
loop->header = fallthru->dest;
|
1378 |
|
|
|
1379 |
|
|
/* Try to be clever in placing the newly created preheader. The idea is to
|
1380 |
|
|
avoid breaking any "fallthruness" relationship between blocks.
|
1381 |
|
|
|
1382 |
|
|
The preheader was created just before the header and all incoming edges
|
1383 |
|
|
to the header were redirected to the preheader, except the latch edge.
|
1384 |
|
|
So the only problematic case is when this latch edge was a fallthru
|
1385 |
|
|
edge: it is not anymore after the preheader creation so we have broken
|
1386 |
|
|
the fallthruness. We're therefore going to look for a better place. */
|
1387 |
|
|
if (latch_edge_was_fallthru)
|
1388 |
|
|
{
|
1389 |
|
|
if (one_succ_pred)
|
1390 |
|
|
e = one_succ_pred;
|
1391 |
|
|
else
|
1392 |
|
|
e = EDGE_PRED (dummy, 0);
|
1393 |
|
|
|
1394 |
|
|
move_block_after (dummy, e->src);
|
1395 |
|
|
}
|
1396 |
|
|
|
1397 |
|
|
if (irred)
|
1398 |
|
|
{
|
1399 |
|
|
dummy->flags |= BB_IRREDUCIBLE_LOOP;
|
1400 |
|
|
single_succ_edge (dummy)->flags |= EDGE_IRREDUCIBLE_LOOP;
|
1401 |
|
|
}
|
1402 |
|
|
|
1403 |
|
|
if (dump_file)
|
1404 |
|
|
fprintf (dump_file, "Created preheader block for loop %i\n",
|
1405 |
|
|
loop->num);
|
1406 |
|
|
|
1407 |
|
|
if (flags & CP_FALLTHRU_PREHEADERS)
|
1408 |
|
|
gcc_assert ((single_succ_edge (dummy)->flags & EDGE_FALLTHRU)
|
1409 |
|
|
&& !JUMP_P (BB_END (dummy)));
|
1410 |
|
|
|
1411 |
|
|
return dummy;
|
1412 |
|
|
}
|
1413 |
|
|
|
1414 |
|
|
/* Create preheaders for each loop; for meaning of FLAGS see create_preheader. */
|
1415 |
|
|
|
1416 |
|
|
void
|
1417 |
|
|
create_preheaders (int flags)
|
1418 |
|
|
{
|
1419 |
|
|
loop_iterator li;
|
1420 |
|
|
struct loop *loop;
|
1421 |
|
|
|
1422 |
|
|
if (!current_loops)
|
1423 |
|
|
return;
|
1424 |
|
|
|
1425 |
|
|
FOR_EACH_LOOP (li, loop, 0)
|
1426 |
|
|
create_preheader (loop, flags);
|
1427 |
|
|
loops_state_set (LOOPS_HAVE_PREHEADERS);
|
1428 |
|
|
}
|
1429 |
|
|
|
1430 |
|
|
/* Forces all loop latches to have only single successor. */
|
1431 |
|
|
|
1432 |
|
|
void
|
1433 |
|
|
force_single_succ_latches (void)
|
1434 |
|
|
{
|
1435 |
|
|
loop_iterator li;
|
1436 |
|
|
struct loop *loop;
|
1437 |
|
|
edge e;
|
1438 |
|
|
|
1439 |
|
|
FOR_EACH_LOOP (li, loop, 0)
|
1440 |
|
|
{
|
1441 |
|
|
if (loop->latch != loop->header && single_succ_p (loop->latch))
|
1442 |
|
|
continue;
|
1443 |
|
|
|
1444 |
|
|
e = find_edge (loop->latch, loop->header);
|
1445 |
|
|
|
1446 |
|
|
split_edge (e);
|
1447 |
|
|
}
|
1448 |
|
|
loops_state_set (LOOPS_HAVE_SIMPLE_LATCHES);
|
1449 |
|
|
}
|
1450 |
|
|
|
1451 |
|
|
/* This function is called from loop_version.  It splits the entry edge
   of the loop we want to version, adds the versioning condition, and
   adjusts the edges to the two versions of the loop appropriately.
   E is an incoming edge.  Returns the basic block containing the
   condition.

   --- edge e ----> [second_head]

   Split it and insert new conditional expression and adjust edges.

    --- edge e ---> [cond expr] ---> [first_head]
                        |
                        +---------> [second_head]

   THEN_PROB is the probability of the then branch of the condition.  */

static basic_block
lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
                           edge e, void *cond_expr, unsigned then_prob)
{
  basic_block new_head = NULL;
  edge e1;

  gcc_assert (e->dest == second_head);

  /* Split edge E.  This will create a new basic block, where we can
     insert the conditional expression.  */
  new_head = split_edge (e);

  lv_add_condition_to_bb (first_head, second_head, new_head,
                          cond_expr);

  /* Don't set EDGE_TRUE_VALUE in RTL mode, as it's invalid there.  */
  e = single_succ_edge (new_head);
  e1 = make_edge (new_head, first_head,
                  current_ir_type () == IR_GIMPLE ? EDGE_TRUE_VALUE : 0);
  e1->probability = then_prob;
  e->probability = REG_BR_PROB_BASE - then_prob;
  e1->count = RDIV (e->count * e1->probability, REG_BR_PROB_BASE);
  e->count = RDIV (e->count * e->probability, REG_BR_PROB_BASE);

  set_immediate_dominator (CDI_DOMINATORS, first_head, new_head);
  set_immediate_dominator (CDI_DOMINATORS, second_head, new_head);

  /* Adjust loop header phi nodes.  */
  lv_adjust_loop_header_phi (first_head, second_head, new_head, e1);

  return new_head;
}

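/* Worked example of the probability bookkeeping above (illustrative; the
   numbers assume REG_BR_PROB_BASE is 10000).  With THEN_PROB = 9000 and an
   incoming execution count of 1000 on E:

     e1->probability = 9000
     e->probability  = 10000 - 9000 = 1000
     e1->count       = RDIV (1000 * 9000, 10000) = 900
     e->count        = RDIV (1000 * 1000, 10000) = 100

   i.e. the new edge E1 to FIRST_HEAD (the then branch) inherits 90% of the
   count and the original successor edge E to SECOND_HEAD keeps the remaining
   10%.  Both counts are computed from the value E->count had on entry, since
   E1->count is assigned before E->count is overwritten.  */
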
/* Main entry point for the Loop Versioning transformation.

   Given a condition and a loop, this transformation creates

     if (condition) { loop_copy1 } else { loop_copy2 },

   where loop_copy1 is the loop transformed in one way, and loop_copy2
   is the loop transformed in another way (or unchanged).  CONDITION
   may be a run-time test for things that were not resolved by static
   analysis (overlapping ranges (anti-aliasing), alignment, etc.).

   THEN_PROB is the probability of the then edge of the if.  THEN_SCALE
   is the ratio by which the frequencies in the original loop should
   be scaled.  ELSE_SCALE is the ratio by which the frequencies in the
   new loop should be scaled.

   If PLACE_AFTER is true, we place the new loop after LOOP in the
   instruction stream, otherwise it is placed before LOOP.  */

struct loop *
loop_version (struct loop *loop,
              void *cond_expr, basic_block *condition_bb,
              unsigned then_prob, unsigned then_scale, unsigned else_scale,
              bool place_after)
{
  basic_block first_head, second_head;
  edge entry, latch_edge, true_edge, false_edge;
  int irred_flag;
  struct loop *nloop;
  basic_block cond_bb;

  /* Record the entry edge for the loop; save and clear its irreducible
     flag.  */
  entry = loop_preheader_edge (loop);
  irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
  entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;

  /* Note down the head of the loop as FIRST_HEAD.  */
  first_head = entry->dest;

  /* Duplicate the loop.  */
  if (!cfg_hook_duplicate_loop_to_header_edge (loop, entry, 1,
                                               NULL, NULL, NULL, 0))
    return NULL;

  /* After duplication the entry edge points to the new loop head block.
     Note it down as SECOND_HEAD.  */
  second_head = entry->dest;

  /* Split the loop entry edge and insert a new block with the condition.  */
  cond_bb = lv_adjust_loop_entry_edge (first_head, second_head,
                                       entry, cond_expr, then_prob);
  if (condition_bb)
    *condition_bb = cond_bb;

  if (!cond_bb)
    {
      entry->flags |= irred_flag;
      return NULL;
    }

  latch_edge = single_succ_edge (get_bb_copy (loop->latch));

  extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
  nloop = loopify (latch_edge,
                   single_pred_edge (get_bb_copy (loop->header)),
                   cond_bb, true_edge, false_edge,
                   false /* Do not redirect all edges.  */,
                   then_scale, else_scale);

  /* loopify redirected LATCH_EDGE.  Update its PENDING_STMTS.  */
  lv_flush_pending_stmts (latch_edge);

  /* loopify redirected COND_BB's false successor edge.  Update its
     PENDING_STMTS.  */
  extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
  lv_flush_pending_stmts (false_edge);

  /* Adjust the irreducible flag.  */
  if (irred_flag)
    {
      cond_bb->flags |= BB_IRREDUCIBLE_LOOP;
      loop_preheader_edge (loop)->flags |= EDGE_IRREDUCIBLE_LOOP;
      loop_preheader_edge (nloop)->flags |= EDGE_IRREDUCIBLE_LOOP;
      single_pred_edge (cond_bb)->flags |= EDGE_IRREDUCIBLE_LOOP;
    }

  if (place_after)
    {
      basic_block *bbs = get_loop_body_in_dom_order (nloop), after;
      unsigned i;

      after = loop->latch;

      for (i = 0; i < nloop->num_nodes; i++)
        {
          move_block_after (bbs[i], after);
          after = bbs[i];
        }

      free (bbs);
    }

  /* At this point COND_BB is the loop preheader with two successors,
     FIRST_HEAD and SECOND_HEAD.  Make sure that the loop preheader has only
     one successor.  */
  split_edge (loop_preheader_edge (loop));
  split_edge (loop_preheader_edge (nloop));

  return nloop;
}

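/* Usage sketch (illustrative only; the parameter values are hypothetical and
   not copied from an existing caller).  A pass that wants a versioned copy of
   LOOP guarded by COND, with a 50% then-probability and each copy keeping
   half of the original frequencies, placed after the original loop:

     basic_block cond_bb;
     struct loop *nloop
       = loop_version (loop, cond, &cond_bb,
                       REG_BR_PROB_BASE / 2, REG_BR_PROB_BASE / 2,
                       REG_BR_PROB_BASE / 2, true);

   The arguments after the condition block pointer are THEN_PROB, THEN_SCALE,
   ELSE_SCALE and PLACE_AFTER; a NULL result means the loop body could not be
   duplicated.  COND must be in whatever form the current IR's
   lv_add_condition_to_bb hook expects.  */
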
/* The structure of loops might have changed.  Some loops might have been
   removed (and their headers and latches set to NULL), loop exits might have
   been removed (so the loop nesting may be wrong), and some blocks and edges
   were changed (so the information about the bb -> loop mapping may no longer
   be correct).  But still, for the remaining loops the header dominates the
   latch, and loops did not get new subloops (new loops might possibly get
   created, but we are not interested in them).  Fix up the mess.

   If CHANGED_BBS is not NULL, basic blocks whose loop has changed are
   marked in it.  */

void
fix_loop_structure (bitmap changed_bbs)
{
  basic_block bb;
  struct loop *loop, *ploop;
  loop_iterator li;
  bool record_exits = false;
  struct loop **superloop = XNEWVEC (struct loop *, number_of_loops ());

  /* Remove the old bb -> loop mapping.  Remember the depth of the blocks in
     the loop hierarchy, so that we can recognize blocks whose loop nesting
     relationship has changed.  */
  FOR_EACH_BB (bb)
    {
      if (changed_bbs)
        bb->aux = (void *) (size_t) loop_depth (bb->loop_father);
      bb->loop_father = current_loops->tree_root;
    }

  if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
    {
      release_recorded_exits ();
      record_exits = true;
    }

  /* Remove the dead loops from the structures.  We start from the innermost
     loops, so that when we remove a loop, we know that the loops inside it
     are preserved, and we do not waste time relinking loops that will be
     removed later.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      if (loop->header)
        continue;

      while (loop->inner)
        {
          ploop = loop->inner;
          flow_loop_tree_node_remove (ploop);
          flow_loop_tree_node_add (loop_outer (loop), ploop);
        }

      /* Remove the loop and free its data.  */
      delete_loop (loop);
    }

  /* Rescan the bodies of the loops, starting from the outermost ones.  We
     assume that no optimization interchanges the order of the loops, i.e., it
     cannot happen that L1 was a superloop of L2 before and is a subloop of L2
     now (without explicitly updating the loop information).  At the same
     time, we also determine the new loop structure.  */
  current_loops->tree_root->num_nodes = n_basic_blocks;
  FOR_EACH_LOOP (li, loop, 0)
    {
      superloop[loop->num] = loop->header->loop_father;
      loop->num_nodes = flow_loop_nodes_find (loop->header, loop);
    }

  /* Now fix the loop nesting.  */
  FOR_EACH_LOOP (li, loop, 0)
    {
      ploop = superloop[loop->num];
      if (ploop != loop_outer (loop))
        {
          flow_loop_tree_node_remove (loop);
          flow_loop_tree_node_add (ploop, loop);
        }
    }
  free (superloop);

  /* Mark the blocks whose loop has changed.  */
  if (changed_bbs)
    {
      FOR_EACH_BB (bb)
        {
          if ((void *) (size_t) loop_depth (bb->loop_father) != bb->aux)
            bitmap_set_bit (changed_bbs, bb->index);

          bb->aux = NULL;
        }
    }

  if (loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
    create_preheaders (CP_SIMPLE_PREHEADERS);

  if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
    force_single_succ_latches ();

  if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
    mark_irreducible_loops ();

  if (record_exits)
    record_loop_exits ();

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
}
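
/* Usage sketch (illustrative only, not an existing call site).  A pass that
   has changed the CFG behind the loop optimizer's back can repair the loop
   tree and find out which blocks changed their loop:

     bitmap changed = BITMAP_ALLOC (NULL);
     fix_loop_structure (changed);
     BITMAP_FREE (changed);

   Bits set in CHANGED are the indices of blocks whose loop nesting depth
   changed; a caller would typically use them to invalidate per-block cached
   data before freeing the bitmap.  Passing NULL skips the tracking.  */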