/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */
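/* For instance, on a target where QImode is 8 bits wide,
   trunc_int_for_mode (0xff, QImode) masks the value to 0xff and then
   sign-extends from bit 7, yielding -1.  */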

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
|
76 |
|
|
|
77 |
|
|
/* Return an rtx for the sum of X and the integer C. */
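/* For example, given X = (plus (reg R) (const_int 6)) and C = -2, the
   PLUS case below folds the constants and returns
   (plus (reg R) (const_int 4)); given a SYMBOL_REF, the result is
   wrapped as (const (plus (symbol_ref ...) (const_int C))).  */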

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
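/* For example, with *CONSTPTR == const0_rtx and
   X = (plus (reg R) (const_int 8)), the result is (reg R) and *CONSTPTR
   becomes (const_int 8).  */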

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
         addition operation only if one operand is a constant and
         converting the constant does not change it or if one operand
         is a constant and we are using a ptr_extend instruction
         (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
         may overflow/underflow.  We relax the condition to include
         zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
         parts of the compiler depend on it.  See PR 49721.

         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (POINTERS_EXTEND_UNSIGNED != 0
                  || XEXP (x, 1) == convert_memory_address_addr_space
                                      (to_mode, XEXP (x, 1), as))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
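/* For example, force_reg (SImode, GEN_INT (42)) returns a fresh pseudo and
   emits an insn moving the constant into it; the REG_EQUAL note below is
   only added when the emitted SET_SRC differs from the constant.  */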

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}


/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
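/* For example, with a preferred stack boundary of 16 bytes, a constant
   SIZE of 21 is rounded up to 32; a non-constant SIZE is rounded at run
   time with the add/divide/multiply sequence below.  */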

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */
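/* Typical usage pairs this with emit_stack_restore, e.g.:

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area);
     ...emit code that changes the stack pointer...
     emit_stack_restore (SAVE_BLOCK, save_area);

   The save area is allocated here on the first call.  */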

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */
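/* For instance, the __builtin_alloca expander typically calls this along
   the lines of allocate_dynamic_stack_space (size_rtx, 0, BIGGEST_ALIGNMENT,
   cannot_accumulate), letting SIZE_ALIGN be deduced from SIZE.  */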

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }
|
1379 |
|
|
|
1380 |
|
|
do_pending_stack_adjust ();
|
1381 |
|
|
|
1382 |
|
|
/* We ought to be called always on the toplevel and stack ought to be aligned
|
1383 |
|
|
properly. */
|
1384 |
|
|
gcc_assert (!(stack_pointer_delta
|
1385 |
|
|
% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
|
1386 |
|
|
|
1387 |
|
|
/* If needed, check that we have the required amount of stack. Take into
|
1388 |
|
|
account what has already been checked. */
|
1389 |
|
|
if (STACK_CHECK_MOVING_SP)
|
1390 |
|
|
;
|
1391 |
|
|
else if (flag_stack_check == GENERIC_STACK_CHECK)
|
1392 |
|
|
probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
|
1393 |
|
|
size);
|
1394 |
|
|
else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
|
1395 |
|
|
probe_stack_range (STACK_CHECK_PROTECT, size);
|
1396 |
|
|
|
1397 |
|
|
/* Don't let anti_adjust_stack emit notes. */
|
1398 |
|
|
suppress_reg_args_size = true;
|
1399 |
|
|
|
1400 |
|
|
/* Perform the required allocation from the stack. Some systems do
|
1401 |
|
|
this differently than simply incrementing/decrementing from the
|
1402 |
|
|
stack pointer, such as acquiring the space by calling malloc(). */
|
1403 |
|
|
#ifdef HAVE_allocate_stack
|
1404 |
|
|
if (HAVE_allocate_stack)
|
1405 |
|
|
{
|
1406 |
|
|
struct expand_operand ops[2];
|
1407 |
|
|
/* We don't have to check against the predicate for operand 0 since
|
1408 |
|
|
TARGET is known to be a pseudo of the proper mode, which must
|
1409 |
|
|
be valid for the operand. */
|
1410 |
|
|
create_fixed_operand (&ops[0], target);
|
1411 |
|
|
create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
|
1412 |
|
|
expand_insn (CODE_FOR_allocate_stack, 2, ops);
|
1413 |
|
|
}
|
1414 |
|
|
else
|
1415 |
|
|
#endif
|
1416 |
|
|
{
|
1417 |
|
|
int saved_stack_pointer_delta;
|
1418 |
|
|
|
1419 |
|
|
#ifndef STACK_GROWS_DOWNWARD
|
1420 |
|
|
emit_move_insn (target, virtual_stack_dynamic_rtx);
|
1421 |
|
|
#endif
|
1422 |
|
|
|
1423 |
|
|
/* Check stack bounds if necessary. */
|
1424 |
|
|
if (crtl->limit_stack)
|
1425 |
|
|
{
|
1426 |
|
|
rtx available;
|
1427 |
|
|
rtx space_available = gen_label_rtx ();
|
1428 |
|
|
#ifdef STACK_GROWS_DOWNWARD
|
1429 |
|
|
available = expand_binop (Pmode, sub_optab,
|
1430 |
|
|
stack_pointer_rtx, stack_limit_rtx,
|
1431 |
|
|
NULL_RTX, 1, OPTAB_WIDEN);
|
1432 |
|
|
#else
|
1433 |
|
|
available = expand_binop (Pmode, sub_optab,
|
1434 |
|
|
stack_limit_rtx, stack_pointer_rtx,
|
1435 |
|
|
NULL_RTX, 1, OPTAB_WIDEN);
|
1436 |
|
|
#endif
|
1437 |
|
|
emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
|
1438 |
|
|
space_available);
|
1439 |
|
|
#ifdef HAVE_trap
|
1440 |
|
|
if (HAVE_trap)
|
1441 |
|
|
emit_insn (gen_trap ());
|
1442 |
|
|
else
|
1443 |
|
|
#endif
|
1444 |
|
|
error ("stack limits not supported on this target");
|
1445 |
|
|
emit_barrier ();
|
1446 |
|
|
emit_label (space_available);
|
1447 |
|
|
}
|
1448 |
|
|
|
1449 |
|
|
saved_stack_pointer_delta = stack_pointer_delta;
|
1450 |
|
|
|
1451 |
|
|
if (flag_stack_check && STACK_CHECK_MOVING_SP)
|
1452 |
|
|
anti_adjust_stack_and_probe (size, false);
|
1453 |
|
|
else
|
1454 |
|
|
anti_adjust_stack (size);
|
1455 |
|
|
|
1456 |
|
|
/* Even if size is constant, don't modify stack_pointer_delta.
|
1457 |
|
|
The constant size alloca should preserve
|
1458 |
|
|
crtl->preferred_stack_boundary alignment. */
|
1459 |
|
|
stack_pointer_delta = saved_stack_pointer_delta;
|
1460 |
|
|
|
1461 |
|
|
#ifdef STACK_GROWS_DOWNWARD
|
1462 |
|
|
emit_move_insn (target, virtual_stack_dynamic_rtx);
|
1463 |
|
|
#endif
|
1464 |
|
|
}
|

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }
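
  /* The add/divide/multiply sequence above rounds TARGET up to a multiple
     of REQUIRED_ALIGN / BITS_PER_UNIT bytes, i.e.
     target = ((target + N - 1) / N) * N with N the alignment in bytes.
     For instance, assuming a required alignment of 128 bits (N = 16),
     an address of 0x1007 is rounded up to 0x1010; the figures are only
     illustrative.  */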

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
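
/* For instance, a front end with its own run-time checking could call
   set_stack_check_libfunc ("__example_stack_check") once at start-up, after
   which probe_stack_range below emits a call to that routine instead of
   explicit probes.  The routine name here is purely illustrative.  */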

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}
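
/* On targets without a probe_stack pattern, the fallback is thus a plain
   volatile store of zero to the probed word; the point of the probe is
   simply to touch the page containing ADDRESS so that the stack area is
   validated (or a fault is taken) before it is used.  */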

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
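
/* So, on a machine whose stack grows downward and which defines
   STACK_CHECK_PROBE_INTERVAL_EXP as 12 (a value assumed here only for
   illustration), PROBE_INTERVAL is 4096, STACK_GROW_OP is MINUS and
   STACK_GROW_OFF (4096) is -4096: probe addresses are formed by
   subtracting offsets from the stack pointer.  */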

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
      return;
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (size, first)));

      create_input_operand (&ops[0], addr, Pmode);
      if (maybe_expand_insn (CODE_FOR_check_stack, 1, ops))
        return;
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }
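
  /* As an illustration of the constant case just above: assuming
     PROBE_INTERVAL is 4096, a call with FIRST == 4096 and SIZE == 12288
     probes at offsets 8192 and 12288 from the stack pointer in the loop,
     then at offset 16384 with the final probe (the offsets being
     subtracted on a downward-growing stack).  */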

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
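
/* As an illustration of the variable case: with FIRST == 0, a run-time SIZE
   of 10000 and PROBE_INTERVAL assumed to be 4096, ROUNDED_SIZE is 8192, so
   the loop probes at offsets 4096 and 8192 from the starting address and the
   residual probe of Step 4 touches offset 10000.  */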

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
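
  /* To make the constant case above concrete: assuming PROBE_INTERVAL is
     4096 and UNITS_PER_WORD is 8 (so DOPE is 32), a constant SIZE of 8192
     anti-adjusts the stack pointer by 8224 bytes and probes, then by a
     further 4096 bytes and probes again; the total adjustment is
     SIZE + PROBE_INTERVAL + DOPE, which the final adjustment at the end
     of the function accounts for.  */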

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
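
/* Note that, when ADJUST_BACK is false, the net stack pointer adjustment is
   the same as a plain anti_adjust_stack (size), except that every
   PROBE_INTERVAL of the newly exposed area has been touched along the way;
   when ADJUST_BACK is true, the stack pointer is restored to its starting
   value.  */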

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
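
/* For example, if the target returns a 6-byte BLKmode aggregate in a
   register, the loop above picks the narrowest integer mode of at least
   6 bytes (DImode on a typical target) and VAL is re-marked with that
   mode; the concrete mode chosen depends on the target's mode sizes.  */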

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
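
/* For example, rtx_to_tree_code (PLUS) yields (int) PLUS_EXPR, while an rtx
   code with no entry above, such as AND, yields
   LAST_AND_UNUSED_TREE_CODE.  */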

#include "gt-explow.h"