/* Statement simplification on GIMPLE.
   Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
#include "gimple-fold.h"

/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not a bug in the frontend setting
         flags incorrectly.  Those variables should never
         be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
                           || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions, we may not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
         the decision to eliminate the offline copy of the function yet.
         The second is important when devirtualization happens during the
         final compilation stage, when making a new reference no longer
         causes the callee to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
        return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
        return false;
    }
  return true;
}

/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));

      if (base
          && (TREE_CODE (base) == VAR_DECL
              || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base))
        return NULL_TREE;
      if (base && TREE_CODE (base) == VAR_DECL)
        {
          TREE_ADDRESSABLE (base) = 1;
          if (cfun && gimple_referenced_vars (cfun))
            add_referenced_var (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
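
/* For example, an initializer of the form &x + 4 (a POINTER_PLUS_EXPR of
   an invariant address and the constant 4) is canonicalized by the
   function above into an ADDR_EXPR of a MEM_REF, roughly &MEM[&x + 4]
   in dump syntax, which is_gimple_min_invariant accepts.  (Illustrative
   sketch; the exact dump syntax varies.)  */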

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
        {
          val = canonicalize_constructor_val (val);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
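
/* For example, given

     static const int answer = 42;

   a load from 'answer' can be replaced by a constant:
   get_symbol_constant_value returns the INTEGER_CST 42, since the
   initializer canonicalizes to a gimple minimal invariant.
   (Illustrative sketch; the variable name is made up.)  */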


/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
                              TREE_OPERAND (*t, 0),
                              TREE_OPERAND (*t, 1));
      if (tem)
        {
          TREE_THIS_VOLATILE (tem) = volatile_p;
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
          == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
          == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
         from the rhs to the lhs if is_lhs, but we don't have the
         rhs here to verify that.  Thus require strict type
         compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
                             TREE_TYPE (TREE_OPERAND
                                          (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
        return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          tem = maybe_fold_reference (expr, is_lhs);
          if (tem)
            return tem;
          return expr;
        }
    }

  return NULL_TREE;
}
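
/* For example, once constant propagation rewrites a dereference into
   MEM[&a + 0] with 'a' a variable of strictly compatible type, the
   "fold back" step above turns the reference into plain 'a'; a read
   from a constant aggregate can then fold further through
   fold_const_aggregate_ref.  (Illustrative sketch.)  */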

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return unshare_expr (get_symbol_constant_value (rhs));

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X;
                 X == 1 and X != 0 is X.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
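
/* For example, for a boolean-typed X the GIMPLE_BINARY_RHS case above
   canonicalizes

     lhs = X == 1;   =>   lhs = X;
     lhs = X == 0;   =>   lhs = ~X;       (one-bit precision X)
     lhs = X == 0;   =>   lhs = X ^ 1;    (wider boolean type)

   (Illustrative sketch.)  */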

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;
  gimple last;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_remove (si_p, true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  last = NULL;
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      /* Do not insert the last stmt in this loop but remember it
         for replacing the original statement.  */
      if (last)
        {
          gsi_insert_before (si_p, last, GSI_NEW_STMT);
          gsi_next (si_p);
        }
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
        find_new_referenced_vars (new_stmt);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
      last = new_stmt;
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the last.  */
  gsi_replace (si_p, last, false);
}
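
/* For example, if the original call carried virtual operands

     # .MEM_3 = VDEF <.MEM_2>
     foo ();

   and EXPR gimplifies into two stores, the backward walk above gives
   the last store the original VDEF .MEM_3, and the forward walk threads
   .MEM_2 through a fresh SSA name between the stores, keeping the
   surrounding virtual use-def chain intact.  (Illustrative sketch;
   the SSA names are made up.)  */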

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited,
                                     type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
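
/* For example, for

     p_1 = PHI <"ab"(3), "cd"(4)>
     n_2 = strlen (p_1);

   the GIMPLE_PHI case above finds that every PHI argument has the same
   constant string length 2, so the strlen call can be folded to 2.
   (Illustrative sketch; SSA names and edge numbers are made up.)  */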


/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (CONVERT_EXPR_P (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (loc, callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (loc, callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (loc, callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2], ignore,
                                           DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
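
/* For example, when get_maxval_strlen determines that strlen (src) is 3,
   a call

     strcpy (dst, src);

   is folded (via fold_builtin_strcpy) into

     memcpy (dst, src, 4);

   which also copies the terminating NUL.  (Illustrative sketch.)  */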

/* Generate code adjusting the first parameter of a call statement determined
   by GSI by DELTA.  */

void
gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
  gimple call_stmt = gsi_stmt (*gsi);
  tree parm, tmp;
  gimple new_stmt;

  delta = convert_to_ptrofftype (delta);
  gcc_assert (gimple_call_num_args (call_stmt) >= 1);
  parm = gimple_call_arg (call_stmt, 0);
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
  tmp = create_tmp_var (TREE_TYPE (parm), NULL);
  add_referenced_var (tmp);

  tmp = make_ssa_name (tmp, NULL);
  new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
  SSA_NAME_DEF_STMT (tmp) = new_stmt;
  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
  gimple_call_set_arg (call_stmt, 0, tmp);
}
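
/* For example, with DELTA == 16 a call foo (this_1, x_2) becomes

     D.1234_5 = this_1 + 16;
     foo (D.1234_5, x_2);

   as needed when redirecting a call through a this-adjusting thunk.
   (Illustrative sketch; the temporary's name is made up.)  */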

/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
        break;
      if (offset < 0)
        return NULL_TREE;

      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        {
          if (TREE_CODE (fld) != FIELD_DECL)
            continue;

          pos = int_bit_position (fld);
          size = tree_low_cst (DECL_SIZE (fld), 1);
          if (pos <= offset && (pos + size) > offset)
            break;
        }
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
        return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
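
/* For example, for

     struct A { virtual void f (); };
     static struct A a;

   the function above maps the constant &a to the binfo of A, which has
   no base binfos, so virtual calls on 'a' can safely be resolved from
   it.  (Illustrative sketch.)  */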

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else
        {
          tree obj = OBJ_TYPE_REF_OBJECT (callee);
          tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
          if (binfo)
            {
              HOST_WIDE_INT token
                = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
              tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
              if (fndecl)
                {
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
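
/* For example, a virtual call through a constant object,

     OBJ_TYPE_REF (...; &a -> 0) (&a);

   is rewritten above into the direct call A::f (&a) once the binfo
   extracted from &a identifies the target method.  (Illustrative
   sketch; the dump syntax is approximate.)  */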

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing,
     as we'd be changing the next stmt.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
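
/* For example, constant propagation can leave a statement

     *&x = 0;

   behind; fold_stmt (or fold_stmt_inplace) simplifies it to the direct
   store

     x = 0;

   (Illustrative sketch.)  */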

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
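
/* For illustration, assuming integral operands so that CODE2 can be
   inverted exactly: the request !V AND (X < Y) is computed as the
   inversion of V OR (X >= Y).  If V is itself defined as (X >= Y),
   the inner OR collapses to V and the final result is the single
   comparison X < Y.  */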

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2,
                                  op2a,
                                  op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b)).  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
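
/* For example, with hypothetical SSA names: if VAR = A & B and A is
   defined as A = (X > 5), then VAR AND (X < 2) folds to
   boolean_false_node, because the first partial result
   A AND (X < 2) already folds to false (disjoint ranges).  */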

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any
     simplifications that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
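
/* For example, in the constant-merging block above: (X == 3) AND
   (X < 7) folds to X == 3, since 3 satisfies the second test, while
   (X == 3) AND (X > 7) folds to boolean_false_node.  */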

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
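
/* For illustration, with an integer SSA name X and INTEGER_CST
   operands (hypothetical trees TWO and FIVE):
     maybe_fold_and_comparisons (GT_EXPR, x, two, GT_EXPR, x, five)
   yields the tree for X > 5 (the more restrictive bound), and
     maybe_fold_and_comparisons (LT_EXPR, x, two, GT_EXPR, x, five)
   yields boolean_false_node (disjoint ranges).  The second call to
   and_comparisons_1 retries with the comparisons swapped because that
   function only follows SSA defs in its first argument.  */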

/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2,
                                 op2a,
                                 op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b)).  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)).  */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
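
/* For example, with hypothetical SSA names: if VAR = A | B and A is
   defined as A = (X > 5), then VAR OR (X < 7) folds to
   boolean_true_node, because the partial result A OR (X < 7) already
   covers the whole range of X.  */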

/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    boolean_type_node, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any
     simplifications that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
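
/* For example: (X < 3) OR (X > 3) folds to X != 3 (a singleton gap),
   and (X <= 5) OR (X >= 3) folds to boolean_true_node since the two
   ranges together cover every value of X.  */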

/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}


/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant tree, or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, list;

                  list = NULL_TREE;
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        list = tree_cons (NULL_TREE, val, list);
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), nreverse (list));
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for restrict qualification
                 of pointer types should not apply here.
                 Substitution later will only substitute to allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0))
                  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
                     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
                  && TYPE_MODE (TREE_TYPE (lhs))
                     == TYPE_MODE (TREE_TYPE (op0)))
                return op0;

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;

        if (gimple_call_internal_p (stmt))
          /* No folding yet for these functions.  */
          return NULL_TREE;

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}

/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
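
/* A minimal sketch of a VALUEIZE callback, for illustration only; the
   name identity_valueize is hypothetical and not part of this file.
   It returns each SSA name unchanged, so folding uses the operands as
   they appear; a propagation pass such as CCP would instead return the
   constant recorded in its lattice for VAL, if any.

     static tree
     identity_valueize (tree val)
     {
       return val;
     }

   Example use:
     tree cst = gimple_fold_stmt_to_constant (stmt, identity_valueize);
     if (cst)
       ... CST is a constant per is_gimple_min_invariant ...  */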


/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as when
   BASE is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize) (tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!host_integerp (TREE_OPERAND (base, 1), 0))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).low
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
        return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
          && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
        return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
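
/* For example, for BASE of 'static const int a;' (no explicit
   initializer) this returns error_mark_node, which callers treat as a
   known zero value; for a MEM_REF at a constant byte offset into a
   known array, *BIT_OFFSET is first advanced by that offset scaled by
   BITS_PER_UNIT.  */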

/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We only do the simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
                                unsigned HOST_WIDE_INT offset,
                                unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
          == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
        return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
                                          [offset]));
      /* Folding
           const char a[20]="hello";
           return a[10];

         might lead to offset greater than string length.  In this case we
         know value is either initialized to 0 or out of bounds.  Return 0
         in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
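
/* For example, with 'const char a[20] = "hello";' a byte read of a[1]
   folds to the character constant 'e', while a[10] lies past the
   STRING_CST and folds to 0, matching the zero-filled tail of the
   array.  */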

/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
                         tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
                                  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
         range, or leave the index NULL, meaning that it is the next
         index after the previous one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = tree_to_double_int (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = tree_to_double_int (TREE_OPERAND (cfield, 0));
              max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
            }
        }
      else
        max_index = index = double_int_add (index, double_int_one);

      /* Do we have a match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
          && double_int_cmp (access_index, max_index, 1) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
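
/* For example, for 'static const int a[3] = {1, 2, 3};' a read of
   a[2] computes access_index 2 and folds to the constructor element 3,
   while an access that straddles two elements returns NULL_TREE.  */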
|
2842 |
|
|
|
2843 |
|
|
/* CTOR is CONSTRUCTOR of an aggregate or vector.
|
2844 |
|
|
Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
|
2845 |
|
|
|
2846 |
|
|
static tree
|
2847 |
|
|
fold_nonarray_ctor_reference (tree type, tree ctor,
|
2848 |
|
|
unsigned HOST_WIDE_INT offset,
|
2849 |
|
|
unsigned HOST_WIDE_INT size)
|
2850 |
|
|
{
|
2851 |
|
|
unsigned HOST_WIDE_INT cnt;
|
2852 |
|
|
tree cfield, cval;
|
2853 |
|
|
|
2854 |
|
|
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
|
2855 |
|
|
cval)
|
2856 |
|
|
{
|
2857 |
|
|
tree byte_offset = DECL_FIELD_OFFSET (cfield);
|
2858 |
|
|
tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
|
2859 |
|
|
tree field_size = DECL_SIZE (cfield);
|
2860 |
|
|
double_int bitoffset;
|
2861 |
|
|
double_int byte_offset_cst = tree_to_double_int (byte_offset);
|
2862 |
|
|
double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
|
2863 |
|
|
double_int bitoffset_end, access_end;
|
2864 |
|
|
|
2865 |
|
|
      /* Variable-sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute the bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
                                  double_int_mul (byte_offset_cst,
                                                  bits_per_unit_cst));
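      /* For example, assuming BITS_PER_UNIT == 8 and a 4-byte int, the
         field b of struct { int a; short b; } typically has
         DECL_FIELD_OFFSET == 4 and DECL_FIELD_BIT_OFFSET == 0, giving
         bitoffset == 0 + 4 * 8 == 32.  */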
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = double_int_add (bitoffset,
                                        tree_to_double_int (field_size));
      else
        bitoffset_end = double_int_zero;

      access_end = double_int_add (uhwi_to_double_int (offset),
                                   uhwi_to_double_int (size));

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
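      /* E.g. an access at [32, 48) overlaps a field at [32, 64): it ends
         after the field starts and starts before the field ends.  When
         field_size is NULL_TREE, a flexible array member is treated as
         extending to the end of the object.  */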
      if (double_int_cmp (access_end, bitoffset, 0) > 0
          && (field_size == NULL_TREE
              || double_int_cmp (uhwi_to_double_int (offset),
                                 bitoffset_end, 0) < 0))
        {
          double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
                                                    bitoffset);
          /* We do have an overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
            return NULL_TREE;
          if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      double_int_to_uhwi (inner_offset), size);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}

/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size)
{
  tree ret;

  /* We found the field with an exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor);

  /* We are at the end of the walk; see if we can view-convert the
     result.  */
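  /* E.g. a scalar initializer of integer type can satisfy a same-sized
     read of floating-point type by reinterpreting its bits, since the
     two TYPE_SIZEs match (an illustrative case, not an exhaustive one).  */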
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
                          TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size);
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special-case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && host_integerp (idx, 0))
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               host_integerp (low_bound, 0))
              && (unit_size = array_ref_element_size (t),
                  host_integerp (unit_size, 1)))
            {
              offset = TREE_INT_CST_LOW (idx);
              offset -= TREE_INT_CST_LOW (low_bound);
              offset *= TREE_INT_CST_LOW (unit_size);
              offset *= BITS_PER_UNIT;

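              /* E.g. with a valueized index of 5, a low bound of 0 and a
                 4-byte element size, assuming BITS_PER_UNIT == 8, this
                 yields offset == 5 * 4 * 8 == 160 bits.  */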
              base = TREE_OPERAND (t, 0);
              ctor = get_base_constructor (base, &offset, valueize);
              /* Empty constructor.  Always fold to 0.  */
              if (ctor == error_mark_node)
                return build_zero_cst (TREE_TYPE (t));
              /* Out-of-bound array access.  Value is undefined,
                 but don't fold.  */
              if (offset < 0)
                return NULL_TREE;
              /* We cannot determine the ctor.  */
              if (!ctor)
                return NULL_TREE;
              return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                          TREE_INT_CST_LOW (unit_size)
                                          * BITS_PER_UNIT);
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out-of-bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

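/* Same as fold_const_aggregate_ref_1, but without valueization of SSA
   names: constant folding is attempted on T as written.  */
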
tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}

/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual method table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
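  /* E.g. assuming 64-bit vtable slots (size == 64), TOKEN == 2 advances
     OFFSET by 128 bits, to the third slot of the vtable array.  */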
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
                            offset, size);
  if (!fn)
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
              || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When the cgraph node is missing and the function is not public, we
     cannot devirtualize.  This can happen in WHOPR when the actual method
     ends up in another partition, because we found the devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn))
    return NULL_TREE;

  return fn;
}

/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
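          /* E.g. an SSA name defined by x * x is a square and hence
             non-negative for any floating-point x (NaN aside).  */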
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

        default:
          return false;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 nonpositive inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
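              /* E.g. powi (x, 4) == (x*x) * (x*x), which is non-negative
                 whatever the sign of x (NaN aside).  */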
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
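              /* E.g. pow (x, 2.0) qualifies, while pow (x, 2.5) does not:
                 2.5 is not integer-valued, so real_identical below fails.  */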
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              return false;
            }
        }
    }

  return false;
}